atlan-application-sdk 0.1.1rc21__py3-none-any.whl → 0.1.1rc23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. application_sdk/activities/__init__.py +21 -4
  2. application_sdk/activities/common/utils.py +29 -0
  3. application_sdk/activities/metadata_extraction/sql.py +2 -2
  4. application_sdk/activities/query_extraction/sql.py +5 -5
  5. application_sdk/clients/temporal.py +5 -4
  6. application_sdk/common/credential_utils.py +3 -1
  7. application_sdk/common/utils.py +0 -34
  8. application_sdk/constants.py +23 -5
  9. application_sdk/inputs/objectstore.py +5 -0
  10. application_sdk/inputs/secretstore.py +39 -27
  11. application_sdk/inputs/statestore.py +67 -35
  12. application_sdk/observability/logger_adaptor.py +12 -3
  13. application_sdk/observability/metrics_adaptor.py +5 -3
  14. application_sdk/observability/observability.py +10 -8
  15. application_sdk/observability/traces_adaptor.py +2 -2
  16. application_sdk/observability/utils.py +22 -0
  17. application_sdk/outputs/objectstore.py +2 -0
  18. application_sdk/outputs/parquet.py +4 -1
  19. application_sdk/outputs/secretstore.py +15 -6
  20. application_sdk/outputs/statestore.py +71 -17
  21. application_sdk/server/fastapi/__init__.py +19 -8
  22. application_sdk/server/fastapi/models.py +3 -11
  23. application_sdk/test_utils/e2e/__init__.py +0 -3
  24. application_sdk/version.py +1 -1
  25. application_sdk/workflows/__init__.py +0 -3
  26. application_sdk/workflows/metadata_extraction/sql.py +0 -7
  27. application_sdk/workflows/query_extraction/sql.py +0 -5
  28. {atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/METADATA +2 -2
  29. {atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/RECORD +32 -32
  30. {atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/WHEEL +0 -0
  31. {atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/licenses/LICENSE +0 -0
  32. {atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/licenses/NOTICE +0 -0

application_sdk/activities/__init__.py
@@ -13,16 +13,23 @@ Example:
     ... await state.handler.do_something()
     """
 
+import os
 from abc import ABC
 from typing import Any, Dict, Generic, Optional, TypeVar
 
 from pydantic import BaseModel
 from temporalio import activity
 
-from application_sdk.activities.common.utils import auto_heartbeater, get_workflow_id
+from application_sdk.activities.common.utils import (
+    auto_heartbeater,
+    build_output_path,
+    get_workflow_id,
+    get_workflow_run_id,
+)
 from application_sdk.common.error_codes import OrchestratorError
+from application_sdk.constants import TEMPORARY_PATH
 from application_sdk.handlers import HandlerInterface
-from application_sdk.inputs.statestore import StateStoreInput
+from application_sdk.inputs.statestore import StateStoreInput, StateType
 from application_sdk.observability.logger_adaptor import get_logger
 
 logger = get_logger(__name__)
@@ -177,13 +184,23 @@ class ActivitiesInterface(ABC, Generic[ActivitiesStateType]):
         Raises:
             IOError: If configuration cannot be retrieved from state store
         """
-        workflow_id = workflow_config.get("workflow_id")
+        workflow_id = workflow_config.get("workflow_id", get_workflow_id())
         if not workflow_id:
             raise ValueError("workflow_id is required in workflow_config")
 
         try:
             # This already handles the Dapr call internally
-            return StateStoreInput.extract_configuration(workflow_id)
+            workflow_args = StateStoreInput.get_state(workflow_id, StateType.WORKFLOWS)
+            workflow_args["output_prefix"] = workflow_args.get(
+                "output_prefix", TEMPORARY_PATH
+            )
+            workflow_args["output_path"] = os.path.join(
+                workflow_args["output_prefix"], build_output_path()
+            )
+            workflow_args["workflow_id"] = workflow_id
+            workflow_args["workflow_run_id"] = get_workflow_run_id()
+            return workflow_args
+
         except Exception as e:
             logger.error(
                 f"Failed to retrieve workflow configuration for {workflow_id}: {str(e)}",

application_sdk/activities/common/utils.py
@@ -11,6 +11,7 @@ from typing import Any, Awaitable, Callable, Optional, TypeVar, cast
 
 from temporalio import activity
 
+from application_sdk.constants import APPLICATION_NAME, WORKFLOW_OUTPUT_PATH_TEMPLATE
 from application_sdk.observability.logger_adaptor import get_logger
 
 logger = get_logger(__name__)
@@ -43,6 +44,34 @@ def get_workflow_id() -> str:
         raise Exception("Failed to get workflow id")
 
 
+def get_workflow_run_id() -> str:
+    """Get the workflow run ID from the current activity."""
+    try:
+        return activity.info().workflow_run_id
+    except Exception as e:
+        logger.error("Failed to get workflow run id", exc_info=e)
+        raise Exception("Failed to get workflow run id")
+
+
+def build_output_path() -> str:
+    """Build a standardized output path for workflow artifacts.
+
+    This method creates a consistent output path format across all workflows using the WORKFLOW_OUTPUT_PATH_TEMPLATE constant.
+
+    Returns:
+        str: The standardized output path.
+
+    Example:
+        >>> build_output_path()
+        "artifacts/apps/appName/workflows/wf-123/run-456"
+    """
+    return WORKFLOW_OUTPUT_PATH_TEMPLATE.format(
+        application_name=APPLICATION_NAME,
+        workflow_id=get_workflow_id(),
+        run_id=get_workflow_run_id(),
+    )
+
+
 def auto_heartbeater(fn: F) -> F:
     """Decorator that automatically sends heartbeats during activity execution.
 
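
Note: together with the activities/__init__.py hunks above, this moves run-scoped path construction out of individual workflow classes; get_workflow_args() now joins the configured prefix with the template-built relative path. A minimal sketch of the composition, assuming the default TEMPORARY_PATH and illustrative IDs:

    import os

    output_prefix = "./local/tmp/"  # TEMPORARY_PATH default
    relative = "artifacts/apps/my-app/workflows/wf-123/run-456"  # build_output_path() result
    print(os.path.join(output_prefix, relative))
    # ./local/tmp/artifacts/apps/my-app/workflows/wf-123/run-456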

application_sdk/activities/metadata_extraction/sql.py
@@ -137,8 +137,8 @@ class BaseSQLMetadataExtractionActivities(ActivitiesInterface):
             self._state[workflow_id].handler = handler
 
         if "credential_guid" in workflow_args:
-            credentials = SecretStoreInput.extract_credentials(
-                workflow_args["credential_guid"]
+            credentials = await SecretStoreInput.fetch_secret(
+                secret_key=workflow_args["credential_guid"]
             )
             await sql_client.load(credentials)
 
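
Note: for callers, credential lookup changes from a synchronous state-store read to an awaited, keyword-argument call. Roughly, based on the hunk above:

    # before (rc21)
    credentials = SecretStoreInput.extract_credentials(workflow_args["credential_guid"])

    # after (rc23): async, with the GUID passed as secret_key
    credentials = await SecretStoreInput.fetch_secret(
        secret_key=workflow_args["credential_guid"]
    )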

application_sdk/activities/query_extraction/sql.py
@@ -128,8 +128,8 @@ class SQLQueryExtractionActivities(ActivitiesInterface):
         workflow_id = get_workflow_id()
         sql_client = self.sql_client_class()
         if "credential_guid" in workflow_args:
-            credentials = SecretStoreInput.extract_credentials(
-                workflow_args["credential_guid"]
+            credentials = await SecretStoreInput.fetch_secret(
+                secret_key=workflow_args["credential_guid"]
             )
             await sql_client.load(credentials)
 
@@ -398,7 +398,7 @@ class SQLQueryExtractionActivities(ActivitiesInterface):
         Raises:
             Exception: If marker file writing or object store upload fails
         """
-        output_path = workflow_args["output_path"].rsplit("/", 2)[0]
+        output_path = workflow_args["output_path"].rsplit("/", 1)[0]
         logger.info(f"Writing marker file to {output_path}")
         marker_file_path = os.path.join(output_path, "markerfile")
 
@@ -436,7 +436,7 @@ class SQLQueryExtractionActivities(ActivitiesInterface):
             Exception: If marker file reading fails (logged as warning, not re-raised)
         """
         try:
-            output_path = workflow_args["output_path"].rsplit("/", 2)[0]
+            output_path = workflow_args["output_path"].rsplit("/", 1)[0]
             marker_file_path = os.path.join(output_path, "markerfile")
             logger.info(f"Downloading marker file from {marker_file_path}")
 
@@ -485,7 +485,7 @@ class SQLQueryExtractionActivities(ActivitiesInterface):
 
         current_marker = self.read_marker(workflow_args)
         if current_marker:
-            miner_args.miner_start_time_epoch = current_marker
+            miner_args.current_marker = current_marker
 
         queries_sql_query = self.fetch_queries_sql.format(
             database_name_cleaned=miner_args.database_name_cleaned,
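
Note: the rsplit depth change appears to track the new output layout. output_path now ends at .../{workflow_id}/{run_id} with no extra trailing segment, so the marker file's parent directory is one level up rather than two:

    "prefix/wf-123/run-456".rsplit("/", 1)[0]  # 'prefix/wf-123' (new)
    "prefix/wf-123/run-456".rsplit("/", 2)[0]  # 'prefix' (old behaviour)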

application_sdk/clients/temporal.py
@@ -28,6 +28,7 @@ from application_sdk.constants import (
     WORKFLOW_NAMESPACE,
     WORKFLOW_PORT,
 )
+from application_sdk.inputs.statestore import StateType
 from application_sdk.observability.logger_adaptor import get_logger
 from application_sdk.outputs.eventstore import (
     ApplicationEventNames,
@@ -312,13 +313,12 @@ class TemporalWorkflowClient(WorkflowClient):
         """
         if "credentials" in workflow_args:
            # remove credentials from workflow_args and add reference to credentials
-            workflow_args["credential_guid"] = SecretStoreOutput.store_credentials(
+            workflow_args["credential_guid"] = await SecretStoreOutput.save_secret(
                 workflow_args["credentials"]
             )
             del workflow_args["credentials"]
 
         workflow_id = workflow_args.get("workflow_id")
-        output_prefix = workflow_args.get("output_prefix", "/tmp/output")
         if not workflow_id:
             # if workflow_id is not provided, create a new one
             workflow_id = workflow_args.get("argo_workflow_name", str(uuid.uuid4()))
@@ -327,11 +327,12 @@ class TemporalWorkflowClient(WorkflowClient):
             {
                 "application_name": self.application_name,
                 "workflow_id": workflow_id,
-                "output_prefix": output_prefix,
             }
         )
 
-        StateStoreOutput.store_configuration(workflow_id, workflow_args)
+        await StateStoreOutput.save_state_object(
+            id=workflow_id, value=workflow_args, type=StateType.WORKFLOWS
+        )
 
         logger.info(f"Created workflow config with ID: {workflow_id}")
 
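
Note: the persisted workflow config changes shape here: credentials are replaced by a credential_guid before anything is stored, and output_prefix is no longer written at creation time (it is defaulted when the config is read back, per the activities/__init__.py hunk). An illustrative before/after of the stored object, with placeholder values:

    # input workflow_args
    {"credentials": {"username": "admin", "password": "pw"}}
    # persisted under StateType.WORKFLOWS
    {"credential_guid": "<uuid4>", "application_name": "my-app", "workflow_id": "wf-123"}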

application_sdk/common/credential_utils.py
@@ -43,7 +43,9 @@ async def resolve_credentials(credentials: Dict[str, Any]) -> Dict[str, Any]:
         )
 
         # Fetch and apply secret using SecretStoreInput
-        secret_data = await SecretStoreInput.fetch_secret(credential_source, secret_key)
+        secret_data = await SecretStoreInput.fetch_secret(
+            secret_key=secret_key, component_name=credential_source
+        )
         return SecretStoreInput.apply_secret_values(credentials, secret_data)
 
     except Exception as e:

application_sdk/common/utils.py
@@ -18,9 +18,7 @@ from typing import (
 )
 
 from application_sdk.common.error_codes import CommonError
-from application_sdk.inputs.statestore import StateStoreInput
 from application_sdk.observability.logger_adaptor import get_logger
-from application_sdk.outputs.statestore import StateStoreOutput
 
 logger = get_logger(__name__)
 
@@ -268,38 +266,6 @@ def normalize_filters(
     return normalized_filter_list
 
 
-def get_workflow_config(config_id: str) -> Dict[str, Any]:
-    """Gets the workflow configuration from the state store using config id.
-
-    Args:
-        config_id: The configuration ID to retrieve.
-
-    Returns:
-        dict: The workflow configuration.
-    """
-    return StateStoreInput.extract_configuration(config_id)
-
-
-def update_workflow_config(config_id: str, config: Dict[str, Any]) -> Dict[str, Any]:
-    """Updates the workflow configuration.
-
-    Args:
-        config_id: The configuration ID to update.
-        config: The new configuration dictionary.
-
-    Returns:
-        dict: The updated workflow configuration.
-    """
-    extracted_config = get_workflow_config(config_id)
-
-    for key in extracted_config.keys():
-        if key in config and config[key] is not None:
-            extracted_config[key] = config[key]
-
-    StateStoreOutput.store_configuration(config_id, extracted_config)
-    return extracted_config
-
-
 def read_sql_files(
     queries_prefix: str = f"{os.path.dirname(os.path.abspath(__file__))}/queries",
 ) -> Dict[str, str]:

application_sdk/constants.py
@@ -44,6 +44,28 @@ APP_DASHBOARD_PORT = int(os.getenv("ATLAN_APP_DASHBOARD_PORT", "8000"))
 SQL_SERVER_MIN_VERSION = os.getenv("ATLAN_SQL_SERVER_MIN_VERSION")
 #: Path to the SQL queries directory
 SQL_QUERIES_PATH = os.getenv("ATLAN_SQL_QUERIES_PATH", "app/sql")
+#: Whether to use local development mode (used for instance to fetch secrets from the local state store)
+LOCAL_DEVELOPMENT = os.getenv("ATLAN_LOCAL_DEVELOPMENT", "false").lower() == "true"
+
+
+# Output Path Constants
+#: Output path format for workflows (example: objectstore://bucket/artifacts/apps/{application_name}/workflows/{workflow_id}/{workflow_run_id})
+WORKFLOW_OUTPUT_PATH_TEMPLATE = (
+    "artifacts/apps/{application_name}/workflows/{workflow_id}/{run_id}"
+)
+
+# Temporary Path (used to store intermediate files)
+TEMPORARY_PATH = os.getenv("ATLAN_TEMPORARY_PATH", "./local/tmp/")
+
+# State Store Constants
+#: Path template for state store files (example: objectstore://bucket/persistent-artifacts/apps/{application_name}/{state_type}/{id}/config.json)
+STATE_STORE_PATH_TEMPLATE = (
+    "persistent-artifacts/apps/{application_name}/{state_type}/{id}/config.json"
+)
+
+# Observability Constants
+#: Directory for storing observability data
+OBSERVABILITY_DIR = "artifacts/apps/{application_name}/observability"
 
 # Workflow Client Constants
 #: Host address for the Temporal server
@@ -87,6 +109,7 @@ OBJECT_STORE_NAME = os.getenv("OBJECT_STORE_NAME", "objectstore")
 #: Name of the pubsub component in DAPR
 EVENT_STORE_NAME = os.getenv("EVENT_STORE_NAME", "eventstore")
 
+
 # Logger Constants
 #: Log level for the application (DEBUG, INFO, WARNING, ERROR, CRITICAL)
 LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()
@@ -120,10 +143,6 @@ OTEL_QUEUE_SIZE = int(os.getenv("OTEL_QUEUE_SIZE", "2048"))
 #: AWS Session Name
 AWS_SESSION_NAME = os.getenv("AWS_SESSION_NAME", "temp-session")
 
-# Observability Constants
-#: Directory for storing observability data
-OBSERVABILITY_DIR = os.environ.get("ATLAN_OBSERVABILITY_DIR", "/tmp/observability")
-
 # Log batching configuration
 LOG_BATCH_SIZE = int(os.environ.get("ATLAN_LOG_BATCH_SIZE", 100))
 LOG_FLUSH_INTERVAL_SECONDS = int(os.environ.get("ATLAN_LOG_FLUSH_INTERVAL_SECONDS", 10))
@@ -133,7 +152,6 @@ LOG_RETENTION_DAYS = int(os.environ.get("ATLAN_LOG_RETENTION_DAYS", 30))
 LOG_CLEANUP_ENABLED = bool(os.environ.get("ATLAN_LOG_CLEANUP_ENABLED", False))
 
 # Log Location configuration
-LOG_DIR = os.environ.get("ATLAN_LOG_DIR", "/tmp/observability")
 LOG_FILE_NAME = os.environ.get("ATLAN_LOG_FILE_NAME", "log.parquet")
 # Hive Partitioning Configuration
 ENABLE_HIVE_PARTITIONING = (
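
Note: the two new templates resolve as follows with illustrative values:

    WORKFLOW_OUTPUT_PATH_TEMPLATE.format(
        application_name="my-app", workflow_id="wf-123", run_id="run-456"
    )
    # 'artifacts/apps/my-app/workflows/wf-123/run-456'

    STATE_STORE_PATH_TEMPLATE.format(
        application_name="my-app", state_type="workflows", id="wf-123"
    )
    # 'persistent-artifacts/apps/my-app/workflows/wf-123/config.json'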

application_sdk/inputs/objectstore.py
@@ -88,11 +88,16 @@ class ObjectStoreInput:
 
         Args:
             download_file_prefix (str): The base path to calculate relative paths from.
+                example: /tmp/output
             file_path (str): The full path to where the file should be downloaded.
+                example: /tmp/output/persistent-artifacts/apps/myapp/data/wf-123/state.json
 
         Raises:
             Exception: If there's an error downloading the file from the object store.
         """
+        if not os.path.exists(os.path.dirname(file_path)):
+            os.makedirs(os.path.dirname(file_path), exist_ok=True)
+
         with DaprClient() as client:
             relative_path = os.path.relpath(file_path, download_file_prefix)
             metadata = {"key": relative_path, "fileName": relative_path}

application_sdk/inputs/secretstore.py
@@ -7,7 +7,8 @@ from typing import Any, Dict
 
 from dapr.clients import DaprClient
 
-from application_sdk.inputs.statestore import StateStoreInput
+from application_sdk.constants import LOCAL_DEVELOPMENT, SECRET_STORE_NAME
+from application_sdk.inputs.statestore import StateStoreInput, StateType
 from application_sdk.observability.logger_adaptor import get_logger
 
 logger = get_logger(__name__)
@@ -15,7 +16,34 @@ logger = get_logger(__name__)
 
 class SecretStoreInput:
     @classmethod
-    async def fetch_secret(cls, component_name: str, secret_key: str) -> Dict[str, Any]:
+    def get_secret(
+        cls, secret_key: str, component_name: str = SECRET_STORE_NAME
+    ) -> Dict[str, Any]:
+        """Get secret from the Dapr component.
+
+        Args:
+            secret_key: Key of the secret to fetch
+            component_name: Name of the Dapr component to fetch from
+
+        Returns:
+            Dict with processed secret data
+        """
+        try:
+            with DaprClient() as client:
+                dapr_secret_object = client.get_secret(
+                    store_name=component_name, key=secret_key
+                )
+                return cls._process_secret_data(dapr_secret_object.secret)
+        except Exception as e:
+            logger.error(
+                f"Failed to fetch secret using component {component_name}: {str(e)}"
+            )
+            raise
+
+    @classmethod
+    async def fetch_secret(
+        cls, secret_key: str, component_name: str = SECRET_STORE_NAME
+    ) -> Dict[str, Any]:
         """Fetch secret using the Dapr component.
 
         Args:
@@ -29,9 +57,15 @@ class SecretStoreInput:
             Exception: If secret fetching fails
         """
         try:
-            with DaprClient() as client:
-                secret = client.get_secret(store_name=component_name, key=secret_key)
-                return cls._process_secret_data(secret.secret)
+            secret = {}
+            if not LOCAL_DEVELOPMENT:
+                secret = cls.get_secret(secret_key, component_name)
+
+            credential_config = StateStoreInput.get_state(
+                secret_key, StateType.CREDENTIALS
+            )
+            secret.update(credential_config)
+            return secret
         except Exception as e:
             logger.error(
                 f"Failed to fetch secret using component {component_name}: {str(e)}"
@@ -93,25 +127,3 @@ class SecretStoreInput:
                 result_data["extra"][key] = secret_data[value]
 
         return result_data
-
-    @classmethod
-    def extract_credentials(cls, credential_guid: str) -> Dict[str, Any]:
-        """Extract credentials from the state store using the credential GUID.
-
-        Args:
-            credential_guid: The unique identifier for the credentials.
-
-        Returns:
-            Dict[str, Any]: The credentials if found.
-
-        Raises:
-            ValueError: If the credential_guid is invalid or credentials are not found.
-            Exception: If there's an error with the Dapr client operations.
-
-        Examples:
-            >>> SecretStoreInput.extract_credentials("1234567890")
-            {"username": "admin", "password": "password"}
-        """
-        if not credential_guid:
-            raise ValueError("Invalid credential GUID provided.")
-        return StateStoreInput.get_state(f"credential_{credential_guid}")
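
Note: fetch_secret now layers two sources: the Dapr secret store (skipped entirely when LOCAL_DEVELOPMENT is true) and the CREDENTIALS state object stored under the same key, with the state entries winning on overlap via dict.update(). A sketch of a call, with an illustrative key:

    secret = await SecretStoreInput.fetch_secret(secret_key="cred-guid-123")
    # 1. get_secret("cred-guid-123", SECRET_STORE_NAME)   unless LOCAL_DEVELOPMENT
    # 2. StateStoreInput.get_state("cred-guid-123", StateType.CREDENTIALS)
    # 3. result: the Dapr secret overlaid with the state entries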

application_sdk/inputs/statestore.py
@@ -1,26 +1,60 @@
 """State store for the application."""
 
 import json
+import os
+from enum import Enum
 from typing import Any, Dict
 
-from dapr.clients import DaprClient
 from temporalio import activity
 
-from application_sdk.common.error_codes import IOError
-from application_sdk.constants import STATE_STORE_NAME
+from application_sdk.constants import (
+    APPLICATION_NAME,
+    STATE_STORE_PATH_TEMPLATE,
+    TEMPORARY_PATH,
+)
+from application_sdk.inputs.objectstore import ObjectStoreInput
 from application_sdk.observability.logger_adaptor import get_logger
 
 logger = get_logger(__name__)
 activity.logger = logger
 
 
+class StateType(Enum):
+    WORKFLOWS = "workflows"
+    CREDENTIALS = "credentials"
+
+    @classmethod
+    def is_member(cls, type: str) -> bool:
+        return type in cls._value2member_map_
+
+
+def build_state_store_path(id: str, state_type: StateType) -> str:
+    """Build the state file path for the given id and type.
+
+    Args:
+        id: The unique identifier for the state.
+        state_type: The type of state (workflows, credentials, etc.).
+
+    Returns:
+        str: The constructed state file path.
+
+    Example:
+        >>> build_state_store_path("wf-123", "workflows")
+        'persistent-artifacts/apps/my-app/workflows/wf-123/config.json'
+    """
+    return STATE_STORE_PATH_TEMPLATE.format(
+        application_name=APPLICATION_NAME, state_type=state_type.value, id=id
+    )
+
+
 class StateStoreInput:
     @classmethod
-    def get_state(cls, key: str) -> Dict[str, Any]:
+    def get_state(cls, id: str, type: StateType) -> Dict[str, Any]:
         """Get state from the store.
 
         Args:
-            key: The key to retrieve the state for.
+            id: The key to retrieve the state for.
+            type: The type of state to retrieve.
 
         Returns:
             Dict[str, Any]: The retrieved state data.
@@ -28,39 +62,37 @@ class StateStoreInput:
         Raises:
             ValueError: If no state is found for the given key.
             IOError: If there's an error with the Dapr client operations.
+
+        Example:
+            >>> from application_sdk.inputs.statestore import StateStoreInput
+
+            >>> state = StateStoreInput.get_state("wf-123")
+            >>> print(state)
+            {'test': 'test'}
         """
+
+        state_file_path = build_state_store_path(id, type)
+        state = {}
+
         try:
-            with DaprClient() as client:
-                state = client.get_state(store_name=STATE_STORE_NAME, key=key)
-                if not state.data:
-                    raise IOError(
-                        f"{IOError.STATE_STORE_ERROR}: State not found for key: {key}"
-                    )
-                return json.loads(state.data)
-        except IOError as e:
-            logger.error(
-                f"{IOError.STATE_STORE_ERROR}: Failed to extract state: {str(e)}",
-                error_code=IOError.STATE_STORE_ERROR.code,
+            local_state_file_path = os.path.join(TEMPORARY_PATH, state_file_path)
+            ObjectStoreInput.download_file_from_object_store(
+                download_file_prefix=TEMPORARY_PATH,
+                file_path=local_state_file_path,
             )
-            raise  # Re-raise the exception after logging
 
-    @classmethod
-    def extract_configuration(cls, config_id: str) -> Dict[str, Any]:
-        """Extract configuration from the state store using the config ID.
-
-        Args:
-            config_id: The unique identifier for the configuration.
+            with open(local_state_file_path, "r") as file:
+                state = json.load(file)
 
-        Returns:
-            Dict[str, Any]: The configuration if found.
+        except Exception as e:
+            # local error message is "file not found", while in object store it is "object not found"
+            if "not found" in str(e).lower():
+                logger.debug(
+                    f"No state found for {type.value} with id '{id}', returning empty dict"
+                )
+                pass
+            else:
+                logger.error(f"Failed to extract state: {str(e)}")
+                raise
 
-        Raises:
-            ValueError: If the config_id is invalid or configuration is not found.
-            IOError: If there's an error with the Dapr client operations.
-        """
-        if not config_id:
-            raise IOError(
-                f"{IOError.STATE_STORE_ERROR}: Invalid configuration ID provided."
-            )
-        config = cls.get_state(f"config_{config_id}")
-        return config
+        return state
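
Two editorial notes on this hunk: the retained docstring example still shows the old one-argument call (get_state("wf-123")) even though the new signature requires a StateType, and a missing state file is no longer an error; the method now returns an empty dict. Typical use looks like:

    from application_sdk.inputs.statestore import StateStoreInput, StateType

    state = StateStoreInput.get_state("wf-123", StateType.WORKFLOWS)
    # {} if neither a local copy nor an object-store copy exists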

application_sdk/observability/logger_adaptor.py
@@ -24,7 +24,6 @@ from application_sdk.constants import (
     LOG_FLUSH_INTERVAL_SECONDS,
     LOG_LEVEL,
     LOG_RETENTION_DAYS,
-    OBSERVABILITY_DIR,
     OTEL_BATCH_DELAY_MS,
     OTEL_BATCH_SIZE,
     OTEL_EXPORTER_OTLP_ENDPOINT,
@@ -36,7 +35,10 @@ from application_sdk.constants import (
     SERVICE_VERSION,
 )
 from application_sdk.observability.observability import AtlanObservability
-from application_sdk.observability.utils import get_workflow_context
+from application_sdk.observability.utils import (
+    get_observability_dir,
+    get_workflow_context,
+)
 
 
 class LogExtraModel(BaseModel):
@@ -200,6 +202,13 @@ logging.basicConfig(
     level=logging.getLevelNamesMapping()[LOG_LEVEL], handlers=[InterceptHandler()]
 )
 
+DEPENDENCY_LOGGERS = ["daft_io.stats", "tracing.span"]
+
+# Configure external dependency loggers to reduce noise
+for logger_name in DEPENDENCY_LOGGERS:
+    logging.getLogger(logger_name).setLevel(logging.WARNING)
+
+
 # Add these constants
 SEVERITY_MAPPING = {
     "DEBUG": SeverityNumber.DEBUG,
@@ -245,7 +254,7 @@ class AtlanLoggerAdapter(AtlanObservability[LogRecordModel]):
             flush_interval=LOG_FLUSH_INTERVAL_SECONDS,
             retention_days=LOG_RETENTION_DAYS,
             cleanup_enabled=LOG_CLEANUP_ENABLED,
-            data_dir=OBSERVABILITY_DIR,
+            data_dir=get_observability_dir(),
             file_name=LOG_FILE_NAME,
         )
         self.logger_name = logger_name

application_sdk/observability/metrics_adaptor.py
@@ -19,7 +19,6 @@ from application_sdk.constants import (
     METRICS_FILE_NAME,
     METRICS_FLUSH_INTERVAL_SECONDS,
     METRICS_RETENTION_DAYS,
-    OBSERVABILITY_DIR,
     OTEL_BATCH_DELAY_MS,
     OTEL_EXPORTER_OTLP_ENDPOINT,
     OTEL_EXPORTER_TIMEOUT_SECONDS,
@@ -30,7 +29,10 @@ from application_sdk.constants import (
 )
 from application_sdk.observability.logger_adaptor import get_logger
 from application_sdk.observability.observability import AtlanObservability
-from application_sdk.observability.utils import get_workflow_context
+from application_sdk.observability.utils import (
+    get_observability_dir,
+    get_workflow_context,
+)
 
 
 class MetricType(Enum):
@@ -165,7 +167,7 @@ class AtlanMetricsAdapter(AtlanObservability[MetricRecord]):
             flush_interval=METRICS_FLUSH_INTERVAL_SECONDS,
             retention_days=METRICS_RETENTION_DAYS,
             cleanup_enabled=METRICS_CLEANUP_ENABLED,
-            data_dir=OBSERVABILITY_DIR,
+            data_dir=get_observability_dir(),
             file_name=METRICS_FILE_NAME,
         )
 

application_sdk/observability/observability.py
@@ -20,10 +20,11 @@ from application_sdk.constants import (
     LOG_FILE_NAME,
     METRICS_FILE_NAME,
     OBJECT_STORE_NAME,
-    OBSERVABILITY_DIR,
     STATE_STORE_NAME,
+    TEMPORARY_PATH,
     TRACES_FILE_NAME,
 )
+from application_sdk.observability.utils import get_observability_dir
 
 
 class LogRecord(BaseModel):
@@ -440,8 +441,8 @@ class AtlanObservability(Generic[T], ABC):
         with open(parquet_path, "rb") as f:
             file_content = f.read()
         metadata = {
-            "key": os.path.relpath(parquet_path, self.data_dir),
-            "blobName": os.path.relpath(parquet_path, self.data_dir),
+            "key": os.path.relpath(parquet_path, TEMPORARY_PATH),
+            "blobName": os.path.relpath(parquet_path, TEMPORARY_PATH),
             "fileName": os.path.basename(parquet_path),
         }
         with DaprClient() as client:
@@ -614,9 +615,10 @@ class AtlanObservability(Generic[T], ABC):
 class DuckDBUI:
     """Class to handle DuckDB UI functionality."""
 
-    def __init__(self, db_path="/tmp/observability/observability.db"):
+    def __init__(self):
         """Initialize the DuckDB UI handler."""
-        self.db_path = db_path
+        self.observability_dir = get_observability_dir()
+        self.db_path = self.observability_dir + "/observability.db"
         self._duckdb_ui_con = None
 
     def _is_duckdb_ui_running(self, host="0.0.0.0", port=4213):
@@ -628,10 +630,10 @@ class DuckDBUI:
         result = sock.connect_ex((host, port))
         return result == 0
 
-    def start_ui(self, db_path=OBSERVABILITY_DIR):
+    def start_ui(self):
         """Start DuckDB UI and create views for Hive partitioned parquet files."""
         if not self._is_duckdb_ui_running():
-            os.makedirs(OBSERVABILITY_DIR, exist_ok=True)
+            os.makedirs(self.observability_dir, exist_ok=True)
             con = duckdb.connect(self.db_path)
 
             def process_partitioned_files(directory, prefix=""):
@@ -660,7 +662,7 @@ class DuckDBUI:
 
             # Process each type of data
             for data_type in ["logs", "metrics", "traces"]:
-                data_dir = os.path.join(OBSERVABILITY_DIR, data_type)
+                data_dir = os.path.join(self.observability_dir, data_type)
                 if os.path.exists(data_dir):
                     process_partitioned_files(data_dir, data_type)
 

application_sdk/observability/traces_adaptor.py
@@ -14,7 +14,6 @@ from pydantic import BaseModel
 
 from application_sdk.constants import (
     ENABLE_OTLP_TRACES,
-    OBSERVABILITY_DIR,
     OTEL_BATCH_DELAY_MS,
     OTEL_EXPORTER_OTLP_ENDPOINT,
     OTEL_EXPORTER_TIMEOUT_SECONDS,
@@ -30,6 +29,7 @@ from application_sdk.constants import (
 )
 from application_sdk.observability.logger_adaptor import get_logger
 from application_sdk.observability.observability import AtlanObservability
+from application_sdk.observability.utils import get_observability_dir
 
 
 class TraceRecord(BaseModel):
@@ -94,7 +94,7 @@ class AtlanTracesAdapter(AtlanObservability[TraceRecord]):
             flush_interval=TRACES_FLUSH_INTERVAL_SECONDS,
             retention_days=TRACES_RETENTION_DAYS,
             cleanup_enabled=TRACES_CLEANUP_ENABLED,
-            data_dir=OBSERVABILITY_DIR,
+            data_dir=get_observability_dir(),
             file_name=TRACES_FILE_NAME,
         )
 

application_sdk/observability/utils.py
@@ -1,6 +1,14 @@
+import os
+
 from pydantic import BaseModel, Field
 from temporalio import activity, workflow
 
+from application_sdk.constants import (
+    APPLICATION_NAME,
+    OBSERVABILITY_DIR,
+    TEMPORARY_PATH,
+)
+
 
 class WorkflowContext(BaseModel):
     """Workflow context."""
@@ -17,6 +25,20 @@ class WorkflowContext(BaseModel):
     workflow_run_id: str = Field(init=False, default="")
 
 
+def get_observability_dir() -> str:
+    """Build the observability path.
+
+    Args:
+        path: The path to build the observability path from.
+
+    Returns:
+        str: The built observability path.
+    """
+    return os.path.join(
+        TEMPORARY_PATH, OBSERVABILITY_DIR.format(application_name=APPLICATION_NAME)
+    )
+
+
 def get_workflow_context() -> WorkflowContext:
     """Get the workflow context.
 
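
Note: the new helper's docstring mentions a path argument the function does not take. With the default TEMPORARY_PATH and an illustrative APPLICATION_NAME of "my-app", it resolves to:

    get_observability_dir()
    # './local/tmp/artifacts/apps/my-app/observability'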

application_sdk/outputs/objectstore.py
@@ -23,7 +23,9 @@ class ObjectStoreOutput:
 
         Args:
             output_prefix (str): The base path to calculate relative paths from.
+                example: /tmp/output
             file_path (str): The full path to the file to be pushed.
+                example: /tmp/output/persistent-artifacts/apps/myapp/data/wf-123/state.json
 
         Raises:
             IOError: If there's an error reading the file.

application_sdk/outputs/parquet.py
@@ -188,7 +188,10 @@ class ParquetOutput(Output):
         self.total_record_count += row_count
 
         # Generate file path using path_gen function
-        file_path = f"{self.output_path}/{self.path_gen(self.chunk_start, self.chunk_count, self.start_marker, self.end_marker)}"
+        if self.start_marker and self.end_marker:
+            file_path = self.output_path
+        else:
+            file_path = f"{self.output_path}/{self.path_gen(self.chunk_start, self.chunk_count, self.start_marker, self.end_marker)}"
 
         # Write the dataframe to parquet using daft
         dataframe.write_parquet(
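
Note: when both markers are set (the query-extraction case), output_path itself is now used as the write target rather than appending a path_gen() chunk name, consistent with query extraction writing one parquet destination per marker range.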

application_sdk/outputs/secretstore.py
@@ -3,12 +3,14 @@
 import uuid
 from typing import Any, Dict
 
+from application_sdk.constants import LOCAL_DEVELOPMENT
+from application_sdk.inputs.statestore import StateType
 from application_sdk.outputs.statestore import StateStoreOutput
 
 
 class SecretStoreOutput:
     @classmethod
-    def store_credentials(cls, config: Dict[str, Any]) -> str:
+    async def save_secret(cls, config: Dict[str, Any]) -> str:
         """Store credentials in the state store.
 
         Args:
@@ -21,9 +23,16 @@ class SecretStoreOutput:
             Exception: If there's an error with the Dapr client operations.
 
         Examples:
-            >>> SecretStoreOutput.store_credentials({"username": "admin", "password": "password"})
-            "credential_1234567890"
+            >>> SecretStoreOutput.save_secret({"username": "admin", "password": "password"})
+            "1234567890"
         """
-        credential_guid = str(uuid.uuid4())
-        StateStoreOutput.save_state(f"credential_{credential_guid}", config)
-        return credential_guid
+        if LOCAL_DEVELOPMENT:
+            # NOTE: (development) temporary solution to store the credentials in the state store.
+            # In production, dapr doesn't support creating secrets.
+            credential_guid = str(uuid.uuid4())
+            await StateStoreOutput.save_state_object(
+                id=credential_guid, value=config, type=StateType.CREDENTIALS
+            )
+            return credential_guid
+        else:
+            raise ValueError("Storing credentials is not supported in production.")
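
Note: outside local development the new code path refuses to write credentials at all:

    # with LOCAL_DEVELOPMENT=false (the default)
    await SecretStoreOutput.save_secret({"username": "admin", "password": "pw"})
    # raises ValueError: Storing credentials is not supported in production.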

application_sdk/outputs/statestore.py
@@ -1,13 +1,19 @@
 """State store for the application."""
 
 import json
+import os
 from typing import Any, Dict
 
-from dapr.clients import DaprClient
 from temporalio import activity
 
-from application_sdk.constants import STATE_STORE_NAME
+from application_sdk.constants import TEMPORARY_PATH
+from application_sdk.inputs.statestore import (
+    StateStoreInput,
+    StateType,
+    build_state_store_path,
+)
 from application_sdk.observability.logger_adaptor import get_logger
+from application_sdk.outputs.objectstore import ObjectStoreOutput
 
 logger = get_logger(__name__)
 activity.logger = logger
@@ -15,7 +21,7 @@ activity.logger = logger
 
 class StateStoreOutput:
     @classmethod
-    def save_state(cls, key: str, value: Dict[str, Any]) -> None:
+    async def save_state(cls, key: str, value: Any, id: str, type: StateType) -> None:
         """Save state to the store.
 
         Args:
@@ -24,32 +30,80 @@ class StateStoreOutput:
 
         Raises:
             Exception: If there's an error with the Dapr client operations.
+
+        Example:
+            >>> from application_sdk.outputs.statestore import StateStoreOutput
+
+            >>> await StateStoreOutput.save_state("test", {"test": "test"}, "wf-123")
         """
         try:
-            with DaprClient() as client:
-                client.save_state(
-                    store_name=STATE_STORE_NAME,
-                    key=key,
-                    value=json.dumps(value),
-                )
-                logger.info(f"State stored successfully with key: {key}")
+            # get the current state from object store
+            current_state = StateStoreInput.get_state(id, type)
+            state_file_path = build_state_store_path(id, type)
+
+            # update the state with the new value
+            current_state[key] = value
+
+            local_state_file_path = os.path.join(TEMPORARY_PATH, state_file_path)
+            os.makedirs(os.path.dirname(local_state_file_path), exist_ok=True)
+
+            # save the state to a local file
+            with open(local_state_file_path, "w") as file:
+                json.dump(current_state, file)
+
+            # save the state to the object store
+            await ObjectStoreOutput.push_file_to_object_store(
+                output_prefix=TEMPORARY_PATH, file_path=local_state_file_path
+            )
+
         except Exception as e:
             logger.error(f"Failed to store state: {str(e)}")
             raise e
 
     @classmethod
-    def store_configuration(cls, config_id: str, config: Dict[str, Any]) -> str:
-        """Store configuration in the state store.
+    async def save_state_object(
+        cls, id: str, value: Dict[str, Any], type: StateType
+    ) -> Dict[str, Any]:
+        """Save the entire state object to the object store.
 
         Args:
-            config_id: The unique identifier for the workflow.
-            config: The configuration to store.
+            id: The id of the state.
+            value: The value of the state.
+            type: The type of the state.
 
         Returns:
-            str: The generated configuration ID.
+            Dict[str, Any]: The updated state.
 
         Raises:
+            ValueError: If the type is invalid.
             Exception: If there's an error with the Dapr client operations.
+
+        Example:
+            >>> from application_sdk.outputs.statestore import StateStoreOutput
+            >>> await StateStoreOutput.save_state_object("wf-123", {"test": "test"}, "workflow")
         """
-        cls.save_state(f"config_{config_id}", config)
-        return config_id
+
+        try:
+            # get the current state from object store
+            current_state = StateStoreInput.get_state(id, type)
+            state_file_path = build_state_store_path(id, type)
+
+            # update the state with the new value
+            current_state.update(value)
+
+            local_state_file_path = os.path.join(TEMPORARY_PATH, state_file_path)
+            os.makedirs(os.path.dirname(local_state_file_path), exist_ok=True)
+
+            # save the state to a local file
+            with open(local_state_file_path, "w") as file:
+                json.dump(current_state, file)
+
+            # save the state to the object store
+            await ObjectStoreOutput.push_file_to_object_store(
+                output_prefix=TEMPORARY_PATH, file_path=local_state_file_path
+            )
+
+            return current_state
+        except Exception as e:
+            logger.error(f"Failed to store state: {str(e)}")
+            raise e
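
Note: save_state_object is a read-merge-write: the existing object is fetched, updated with the new value via dict.update(), written to a local file under TEMPORARY_PATH, then pushed to the object store. (Also note the retained docstring example passes the string "workflow", which matches neither the StateType member name nor its "workflows" value.) Merge semantics, sketched with illustrative state:

    # existing state: {"a": 1}
    updated = await StateStoreOutput.save_state_object(
        id="wf-123", value={"b": 2}, type=StateType.WORKFLOWS
    )
    # updated == {"a": 1, "b": 2}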

application_sdk/server/fastapi/__init__.py
@@ -12,7 +12,6 @@ from fastapi.templating import Jinja2Templates
 from uvicorn import Config, Server
 
 from application_sdk.clients.workflow import WorkflowClient
-from application_sdk.common.utils import get_workflow_config, update_workflow_config
 from application_sdk.constants import (
     APP_DASHBOARD_HOST,
     APP_DASHBOARD_PORT,
@@ -26,9 +25,11 @@ from application_sdk.constants import (
 )
 from application_sdk.docgen import AtlanDocsGenerator
 from application_sdk.handlers import HandlerInterface
+from application_sdk.inputs.statestore import StateStoreInput, StateType
 from application_sdk.observability.logger_adaptor import get_logger
 from application_sdk.observability.metrics_adaptor import MetricType, get_metrics
 from application_sdk.observability.observability import DuckDBUI
+from application_sdk.outputs.statestore import StateStoreOutput
 from application_sdk.server import ServerInterface
 from application_sdk.server.fastapi.middleware.logmiddleware import LogMiddleware
 from application_sdk.server.fastapi.middleware.metrics import MetricsMiddleware
@@ -587,16 +588,22 @@ class APIServer(ServerInterface):
             )
             raise e
 
-    def get_workflow_config(self, config_id: str) -> WorkflowConfigResponse:
+    def get_workflow_config(
+        self, config_id: str, type: str = "workflows"
+    ) -> WorkflowConfigResponse:
         """Retrieve workflow configuration by ID.
 
         Args:
-            config_id (str): The ID of the workflow configuration to retrieve.
+            config_id (str): The ID of the configuration to retrieve.
+            type (str): The type of the configuration to retrieve.
 
         Returns:
             WorkflowConfigResponse: Response containing the workflow configuration.
         """
-        config = get_workflow_config(config_id)
+        if not StateType.is_member(type):
+            raise ValueError(f"Invalid type {type} for state store")
+
+        config = StateStoreInput.get_state(config_id, StateType(type))
         return WorkflowConfigResponse(
             success=True,
             message="Workflow configuration fetched successfully",
@@ -658,8 +665,8 @@ class APIServer(ServerInterface):
             )
             raise e
 
-    def update_workflow_config(
-        self, config_id: str, body: WorkflowConfigRequest
+    async def update_workflow_config(
+        self, config_id: str, body: WorkflowConfigRequest, type: str = "workflows"
     ) -> WorkflowConfigResponse:
         """Update workflow configuration.
 
@@ -670,8 +677,12 @@ class APIServer(ServerInterface):
         Returns:
             WorkflowConfigResponse: Response containing the updated configuration.
         """
-        # note: it's assumed that the preflight check is successful if the config is being updated
-        config = update_workflow_config(config_id, body.model_dump())
+        if not StateType.is_member(type):
+            raise ValueError(f"Invalid type {type} for state store")
+
+        config = await StateStoreOutput.save_state_object(
+            id=config_id, value=body.model_dump(), type=StateType(type)
+        )
         return WorkflowConfigResponse(
             success=True,
             message="Workflow configuration updated successfully",

application_sdk/server/fastapi/models.py
@@ -162,17 +162,9 @@ class EventWorkflowResponse(WorkflowResponse):
     status: Status = Field(..., description="Status of the workflow")
 
 
-class WorkflowConfigRequest(BaseModel):
-    credential_guid: Optional[str] = Field(
-        default=None, description="Optional GUID field containing database credentials"
-    )
-    connection: Optional[Dict[str, Any]] = Field(
-        default=None,
-        description="Optional JSON field containing connection configuration",
-    )
-    metadata: Optional[Dict[str, Any]] = Field(
-        default=None,
-        description="Optional JSON field containing metadata configuration",
+class WorkflowConfigRequest(RootModel[Dict[str, Any]]):
+    root: Dict[str, Any] = Field(
+        ..., description="Root JSON object containing workflow configuration"
     )
 
 

application_sdk/test_utils/e2e/__init__.py
@@ -143,9 +143,6 @@ class TestInterface:
                 f"Schema base path not found: {cls.schema_base_path}"
             )
 
-        # Prepare the extracted output base path
-        cls.extracted_output_base_path = "/tmp/output"
-
     def monitor_and_wait_workflow_execution(self) -> str:
         """
         Method to monitor the workflow execution

application_sdk/version.py
@@ -2,4 +2,4 @@
 Version information for the application_sdk package.
 """
 
-__version__ = "0.1.1rc21"
+__version__ = "0.1.1rc23"

application_sdk/workflows/__init__.py
@@ -85,9 +85,6 @@ class WorkflowInterface(ABC, Generic[ActivitiesInterfaceType]):
         logger.info("Starting workflow execution")
 
         try:
-            workflow_run_id = workflow.info().run_id
-            workflow_args["workflow_run_id"] = workflow_run_id
-
             retry_policy = RetryPolicy(maximum_attempts=2, backoff_coefficient=2)
 
             await workflow.execute_activity_method(

application_sdk/workflows/metadata_extraction/sql.py
@@ -201,19 +201,12 @@ class BaseSQLMetadataExtractionWorkflow(MetadataExtractionWorkflow):
             heartbeat_timeout=self.default_heartbeat_timeout,
         )
 
-        workflow_run_id = workflow.info().run_id
-        workflow_args["workflow_run_id"] = workflow_run_id
-
         logger.info(f"Starting extraction workflow for {workflow_id}")
         retry_policy = RetryPolicy(
             maximum_attempts=6,
             backoff_coefficient=2,
         )
 
-        output_prefix = workflow_args["output_prefix"]
-        output_path = f"{output_prefix}/{workflow_id}/{workflow_run_id}"
-        workflow_args["output_path"] = output_path
-
         fetch_functions = self.get_fetch_functions()
 
         fetch_and_transforms = [

application_sdk/workflows/query_extraction/sql.py
@@ -97,11 +97,6 @@ class SQLQueryExtractionWorkflow(QueryExtractionWorkflow):
             backoff_coefficient=2,
         )
 
-        workflow_run_id = workflow.info().run_id
-        output_prefix = workflow_args["output_prefix"]
-        output_path = f"{output_prefix}/{workflow_id}/{workflow_run_id}"
-        workflow_args["output_path"] = output_path
-
         results: List[Dict[str, Any]] = await workflow.execute_activity_method(
             self.activities_cls.get_query_batches,
             workflow_args,
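
Note: these workflow-side deletions (here and in the two files above) are the other half of the activities/__init__.py change: run IDs and output paths are no longer injected inside workflow code via workflow.info(), but are attached centrally when get_workflow_args() loads the configuration from the state store.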

{atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: atlan-application-sdk
-Version: 0.1.1rc21
+Version: 0.1.1rc23
 Summary: Atlan Application SDK is a Python library for developing applications on the Atlan Platform
 Project-URL: Repository, https://github.com/atlanhq/application-sdk
 Project-URL: Documentation, https://github.com/atlanhq/application-sdk/README.md
@@ -25,7 +25,7 @@ Requires-Dist: fastapi[standard]>=0.115.0
 Requires-Dist: loguru>=0.7.3
 Requires-Dist: opentelemetry-exporter-otlp>=1.27.0
 Requires-Dist: psutil>=7.0.0
-Requires-Dist: pyatlan==7.1.1
+Requires-Dist: pyatlan>=7.1.1
 Requires-Dist: pydantic>=2.10.6
 Requires-Dist: python-dotenv>=1.1.0
 Requires-Dist: uvloop>=0.21.0; sys_platform != 'win32'

{atlan_application_sdk-0.1.1rc21.dist-info → atlan_application_sdk-0.1.1rc23.dist-info}/RECORD
@@ -1,29 +1,29 @@
 application_sdk/__init__.py,sha256=2e2mvmLJ5dxmJGPELtb33xwP-j6JMdoIuqKycEn7hjg,151
-application_sdk/constants.py,sha256=WraVyKFkUnMHuPRdYcAyDZKee_Gb23PlBj86l4VGEyI,6934
-application_sdk/version.py,sha256=Iz7gukOFbpIONxCSanVZq6hEzppeg0PgEBO4JbJMLF0,88
+application_sdk/constants.py,sha256=fWnEXxywSTB2d9D3a6RII-8rokgab0FcBl-2rjyNtls,7729
+application_sdk/version.py,sha256=b7TQsEG_zt2uFRKeBfqwlG9fyB-_XFYYM6E3sNZz4wA,88
 application_sdk/worker.py,sha256=2fLjuKNJafhaQXrHzmxXYO22F4ZSc0igMjoxXVNBFfk,6167
-application_sdk/activities/__init__.py,sha256=_uWkRVaHjwthE4w96SoNGl84VemYE4G-TJFOXw_VOQM,8924
+application_sdk/activities/__init__.py,sha256=EH5VTHcfGykIX7V1HsG0J1Z-1FbJEXTQOET0HdzFDjU,9519
 application_sdk/activities/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/activities/common/models.py,sha256=305WdrZB7EAtCOAU_q9hMw81XowUdCeuFs9zfzb-MHQ,1196
-application_sdk/activities/common/utils.py,sha256=b4sWj6Qrga0tfFxEjVDhKK6Hb86SvqsP8tLXLbKxhCM,5521
+application_sdk/activities/common/utils.py,sha256=CWAj_tQUSQirSs5wwy-9eS8yI_4HoDfXsj2U_Xkb4Bc,6480
 application_sdk/activities/metadata_extraction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/activities/metadata_extraction/rest.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-application_sdk/activities/metadata_extraction/sql.py,sha256=CPafrCmUaPTFzMhGhqB4KRaje-OO1BZH40BZVVZ714E,20151
+application_sdk/activities/metadata_extraction/sql.py,sha256=Qoz3hNV5asnwUk4G9VLUowfxzyARP5KZnymV5m4ww4Y,20161
 application_sdk/activities/query_extraction/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-application_sdk/activities/query_extraction/sql.py,sha256=-Nusl7PuhkUiVLZ0F8r20cmg6CAo7lz9wuc-YM86okc,21082
+application_sdk/activities/query_extraction/sql.py,sha256=oySpho_rM8HV6mWD6yEyCIg9BOG-35wb8HWlZzyhuqA,21084
 application_sdk/application/__init__.py,sha256=wCaAsoQ-wIYtfeG6hNwwLi8PcufjwMBjmfp4AxWqSyw,7558
 application_sdk/application/metadata_extraction/sql.py,sha256=ohpV4qZ92uKRlH7I_8G67ocnWkZJAZCU_7XdvqYPiN4,7966
 application_sdk/clients/__init__.py,sha256=C9T84J7V6ZumcoWJPAxdd3tqSmbyciaGBJn-CaCCny0,1341
 application_sdk/clients/sql.py,sha256=xnGmA39TRlIVAd9TL--jvuzeMHHvasxlstCUmqGZrbA,18308
-application_sdk/clients/temporal.py,sha256=Fe2YnmpSmKiVVW9PxNz1KCl41EssgBr4E69dJx8f8KU,17593
+application_sdk/clients/temporal.py,sha256=GdyuU30fL7C41azXT95cOBZV3h4ExRR4-X_bp5DT-Qs,17592
 application_sdk/clients/utils.py,sha256=zLFOJbTr_6TOqnjfVFGY85OtIXZ4FQy_rquzjaydkbY,779
 application_sdk/clients/workflow.py,sha256=6bSqmA3sNCk9oY68dOjBUDZ9DhNKQxPD75qqE0cfldc,6104
 application_sdk/common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/common/aws_utils.py,sha256=aeL3BTMzv1UWJ4KxfwY5EsfYnxtS1FKNJ4xKdHeoTjc,3438
-application_sdk/common/credential_utils.py,sha256=VLGoNgSNAio8qHomMUYsPGR1Fv2n2ehbyXG14obg53Y,1748
+application_sdk/common/credential_utils.py,sha256=-JKU74uPoE5hweu2ag5Hvql4Jrt0FWqe4MjpSi9ts58,1796
 application_sdk/common/dataframe_utils.py,sha256=PId9vT6AUoq3tesiTd4sSUvW7RUhPWdAAEBLuOprks4,1262
 application_sdk/common/error_codes.py,sha256=mIstjpedjvN8AKwvCxI9MOGbJ7EXJs9ch9WnQh_vPAU,13252
-application_sdk/common/utils.py,sha256=KD0lNGKiVT3t9zmg35MdJ8bJhPs8cxlTSWJsthr1R2w,14607
+application_sdk/common/utils.py,sha256=mkkkSpz65raVTVQ4bNkf6jG32PjYpzocy48W2nTQjdo,13544
 application_sdk/docgen/__init__.py,sha256=Gr_3uVEnSspKd_-R1YRsDABI-iP4170Dvg5jM2oD76A,7352
 application_sdk/docgen/exporters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/docgen/exporters/mkdocs.py,sha256=vxYjKLz-7z7rBUSkOGTdNVlPdx_VPfFfJ31HHNgMCeM,4024
@@ -44,29 +44,29 @@ application_sdk/handlers/sql.py,sha256=lP3sH6cZK94rJMC-Ul9BomNH3wT-IQ_uwEhh2xAjN
 application_sdk/inputs/__init__.py,sha256=_d-cUhcDyoJTJR3PdQkC831go6VDw9AM6Bg7-qm3NHI,1900
 application_sdk/inputs/iceberg.py,sha256=xiv1kNtVx1k0h3ZJbJeXjZwdfBGSy9j9orYP_AyCYlI,2756
 application_sdk/inputs/json.py,sha256=J1CVz0YGQHDyq840TyoBHE7Baua2yIFHzsrybiZbeWk,6079
-application_sdk/inputs/objectstore.py,sha256=SrDqO-51XmpGCEqPvzVA7gcOuL1d84phY3ZJNO2Rh28,4246
+application_sdk/inputs/objectstore.py,sha256=_cLP-4J_euim69f7kp5Z8Ea0AxTcy68mwDVwlvkZE-g,4502
 application_sdk/inputs/parquet.py,sha256=rrDI_OAktZIpWMIJIvyNoqrDg-lH2t_4NnB_x6JmX30,6101
-application_sdk/inputs/secretstore.py,sha256=qvI25cKnRkW2qovp6E5WvHP78ro2m9uzI0Cv2GgOlA8,4169
+application_sdk/inputs/secretstore.py,sha256=yJXTmfD0xvhrYtTK5GRmK8ob95B1I6ELvWAmzo72P1k,4413
 application_sdk/inputs/sql_query.py,sha256=1EREgea6kKNaMIyX2HLJgbJ07rtAgLasd9NyvDcdZok,10636
-application_sdk/inputs/statestore.py,sha256=e05rR2oCs4z41eTkR8BBeuMa2tU1DXW-vQhiyghz1kI,2181
+application_sdk/inputs/statestore.py,sha256=A7HH7C8ZJqczH61Qz-2gV90ZOVZDGEXCNoQsGtXfzro,2881
 application_sdk/observability/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-application_sdk/observability/logger_adaptor.py,sha256=9Po6QAX3GbKUwphYTaMO6sRCpL3uOBspijmBEGY3sWk,29296
-application_sdk/observability/metrics_adaptor.py,sha256=lwbRcntaDWsFV6upa2oyfe3UADa3UpRf_gEfMvA8AXs,16426
-application_sdk/observability/observability.py,sha256=hdxtu-zvnOdZoC16ZvG-CiZYB2RYMY2QCbX-aLJ5Pl8,24341
-application_sdk/observability/traces_adaptor.py,sha256=fCL1EF4q1g1yFeI3c3oFsjLHmXHRq5J5sJp88O-kkvg,18189
-application_sdk/observability/utils.py,sha256=venirNS-ZybmO0CM_JvX8Wy29eRscexkdkQfc5BJsgo,2002
+application_sdk/observability/logger_adaptor.py,sha256=WTqnNg78W2SRGOQVhELVLn6KMRsurkG1kc7essL08Lk,29529
+application_sdk/observability/metrics_adaptor.py,sha256=4TYPNn38zLeqxwf7cUbe8wh_zwQlr-nyiXjJsiEhTEM,16445
+application_sdk/observability/observability.py,sha256=-2aluQVV7USfMNlHvCZyFS0PfQsunladwvK5DUJOXfE,24440
+application_sdk/observability/traces_adaptor.py,sha256=0eQJPN-tYA_dV8D3uEa5ZiX9g12NDuLnPaFuQMVDdL0,18242
+application_sdk/observability/utils.py,sha256=MKEpT0WYtpATUgLgJDkGQaAP_t-jpDYMUKDfEvr8Phg,2448
 application_sdk/observability/decorators/observability_decorator.py,sha256=JNrWNXT5W4klmlAc5b8C3_VBjDu0PI64W2ptr7LMzk4,8110
 application_sdk/outputs/__init__.py,sha256=HX8VcN22xyrkoRWdjQj8TrC5dEUG7cPzOcvJhlprqAs,8415
 application_sdk/outputs/eventstore.py,sha256=fEW1oo9ncKylF5uLXTgjyRQZCrj72Gta3CaeduSX8nw,5548
 application_sdk/outputs/iceberg.py,sha256=IGtj5WDgqLu6vzDEvw5DLsKsjm29Krto3AHvWpemr0A,5311
 application_sdk/outputs/json.py,sha256=xF-8mY3BZRRejip4s9npIUuFaAxgFmBQVaLMkrI_iCI,14117
-application_sdk/outputs/objectstore.py,sha256=o9HgQk7pTqKnih8Sn0-yZcsH2q1sZrbBepqy2RTF4K4,3406
-application_sdk/outputs/parquet.py,sha256=unFsnja1HidZ0sBExemG-Ykdgq5dqNes1Ms4-UUEAkc,10919
-application_sdk/outputs/secretstore.py,sha256=LPIUPxcaJhYbsnZaIGDqNn07vQdIzbB2YvEvsllq8ws,841
-application_sdk/outputs/statestore.py,sha256=TemBYtlSvsmQUhndQ6uvQ1bgg0a8NEKTockM6cROWuM,1639
+application_sdk/outputs/objectstore.py,sha256=AOYs5cVobuGn2NRqfGRnso36u66eGJaUsYDg1i6DTfU,3535
+application_sdk/outputs/parquet.py,sha256=A2EnEx1zWjaXk10u3eJusmWxGxt8WR7CHXDaJgsKpq0,11040
+application_sdk/outputs/secretstore.py,sha256=AuLcM-wmzr7ZTXePLk3dunj3vhIhWkigQ-XYJo6SOZA,1306
+application_sdk/outputs/statestore.py,sha256=397F9tMsW1-j38kDd9FjPKsr95wg9LKgoi0hGbwle-4,3668
 application_sdk/server/__init__.py,sha256=KTqE1YPw_3WDVMWatJUuf9OOiobLM2K5SMaBrI62sCo,1568
-application_sdk/server/fastapi/__init__.py,sha256=PCz8WP-uxlgcVeTrMcaXnfz7tpKEb5rf3E6QHJrTvOQ,27384
-application_sdk/server/fastapi/models.py,sha256=KrOM-cvX22MdvkwDCZLvqXTkNKzORzt-L1c8Z2fTY3U,7001
+application_sdk/server/fastapi/__init__.py,sha256=QiIaR43QhgJaLeKs5lTghfxCAGxVxFnYAudy47akbP4,27783
+application_sdk/server/fastapi/models.py,sha256=mFFxteDS3ZYXaq7Apor_Meo5WNxTCxqdrMkmTKQjvP0,6687
 application_sdk/server/fastapi/utils.py,sha256=2XI4DylhRQsukhX67lpAzRNCHeFCSpbuNd7TlE2IBJA,1164
 application_sdk/server/fastapi/middleware/logmiddleware.py,sha256=CxcPtDmCbSfSZ8RyI09nIshVIbCokyyA9bByQJ2G_ns,2545
 application_sdk/server/fastapi/middleware/metrics.py,sha256=5ddHAIg5sT-u9tB_HHMGL3Cfu2g1rm9z7ksienIr9ks,1563
@@ -74,7 +74,7 @@ application_sdk/server/fastapi/routers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JC
 application_sdk/server/fastapi/routers/server.py,sha256=vfHQwZCysThzfeVFNVW1IjuAdL0c1Cs4fULKTBK2eNo,4209
 application_sdk/test_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 application_sdk/test_utils/workflow_monitoring.py,sha256=gqq6CsT62GrMt2GqtNSb1iD_-t4MBffQvpO0BXboZek,3490
-application_sdk/test_utils/e2e/__init__.py,sha256=aASnvkXnhgD9ZUZuWa3TWNMve8S5WBPawrhdQQOKJB8,10230
+application_sdk/test_utils/e2e/__init__.py,sha256=qswG5goB5Xzt3yQyZsKGq20EnZMZCmXYTe0CK3Qk3xo,10125
 application_sdk/test_utils/e2e/base.py,sha256=wba-tyJGbPlb7YnJcWIBuIeZTdENM4PxVbm7MahWqYk,12037
 application_sdk/test_utils/e2e/client.py,sha256=S83tKEwRTTYzxxG_F17gkxqjqdgmM8HyoHrhMTOTsQw,3259
 application_sdk/test_utils/e2e/conftest.py,sha256=y1iYvlWHHBJY9HI4M7p_0btIGryTF6KlhmKxkARcZHU,68
@@ -122,13 +122,13 @@ application_sdk/transformers/query/templates/function.yaml,sha256=yCrSdzKPyOoCKP
 application_sdk/transformers/query/templates/schema.yaml,sha256=U9voaKusmF_8a2_jBmdpNo9nJVLWatwsyKJORenbQ6M,1809
 application_sdk/transformers/query/templates/table.yaml,sha256=QQAGLD1UFjbpSA5wvkuufv0OeIw3pfNzW4cYF73tvKY,8080
 application_sdk/transformers/query/templates/tag_attachment.yaml,sha256=dWNDGwRU4_P-t7ibv5XelMP36aGLG29U6MEXOA8zYt0,2884
-application_sdk/workflows/__init__.py,sha256=mlnlA_JixQ79g20KyKIdJCyqy0L0bRt1hF7kXHrR-yk,3986
+application_sdk/workflows/__init__.py,sha256=byluvgzTovr4L1co7YGb4--ktMBqt2pXBjYoxz4dIeU,3869
 application_sdk/workflows/metadata_extraction/__init__.py,sha256=jHUe_ZBQ66jx8bgyduPuECo2RdmJtQsQAKlakADEQbc,120
-application_sdk/workflows/metadata_extraction/sql.py,sha256=FYGf1RFa-l1VYlI1fuOWSmqvjGEZznr8WYuEL8ZCMjo,11223
+application_sdk/workflows/metadata_extraction/sql.py,sha256=CJzrY8X8_E5I8M0vHPgwm5PntANRQup_m6_9JWkHVdw,10914
 application_sdk/workflows/query_extraction/__init__.py,sha256=n066_CX5RpJz6DIxGMkKS3eGSRg03ilaCtsqfJWQb7Q,117
-application_sdk/workflows/query_extraction/sql.py,sha256=eNk_wCTRGVeBFAuJL-d-B1QXkVwDbpuVt06-fBNCxWg,5055
-atlan_application_sdk-0.1.1rc21.dist-info/METADATA,sha256=PRY0snWtCh8ZG6d8lhSoSihKsV-XLrBqnXiplmaiyxY,5445
-atlan_application_sdk-0.1.1rc21.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-atlan_application_sdk-0.1.1rc21.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-atlan_application_sdk-0.1.1rc21.dist-info/licenses/NOTICE,sha256=zlRshGtgfjXFcCKn_hwuILOFV9-NoKe0e2Tjy98_lrw,1044
-atlan_application_sdk-0.1.1rc21.dist-info/RECORD,,
+application_sdk/workflows/query_extraction/sql.py,sha256=kT_JQkLCRZ44ZpaC4QvPL6DxnRIIVh8gYHLqRbMI-hA,4826
+atlan_application_sdk-0.1.1rc23.dist-info/METADATA,sha256=bjjoev-FQHPYkxqGCS-LmSz4e20RucX23h7nwl-XqWY,5445
+atlan_application_sdk-0.1.1rc23.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+atlan_application_sdk-0.1.1rc23.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+atlan_application_sdk-0.1.1rc23.dist-info/licenses/NOTICE,sha256=zlRshGtgfjXFcCKn_hwuILOFV9-NoKe0e2Tjy98_lrw,1044
+atlan_application_sdk-0.1.1rc23.dist-info/RECORD,,