openmetadata-managed-apis 1.8.1.0__py3-none-any.whl → 1.10.5.0__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.

Files changed (19)
  1. openmetadata_managed_apis/api/routes/last_dag_logs.py +14 -2
  2. openmetadata_managed_apis/api/utils.py +10 -0
  3. openmetadata_managed_apis/operations/last_dag_logs.py +148 -33
  4. openmetadata_managed_apis/workflows/ingestion/application.py +1 -0
  5. openmetadata_managed_apis/workflows/ingestion/auto_classification.py +1 -0
  6. openmetadata_managed_apis/workflows/ingestion/common.py +0 -1
  7. openmetadata_managed_apis/workflows/ingestion/dbt.py +1 -0
  8. openmetadata_managed_apis/workflows/ingestion/es_reindex.py +1 -0
  9. openmetadata_managed_apis/workflows/ingestion/lineage.py +1 -0
  10. openmetadata_managed_apis/workflows/ingestion/metadata.py +1 -0
  11. openmetadata_managed_apis/workflows/ingestion/profiler.py +1 -0
  12. openmetadata_managed_apis/workflows/ingestion/test_suite.py +1 -0
  13. openmetadata_managed_apis/workflows/ingestion/usage.py +1 -0
  14. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/METADATA +1 -1
  15. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/RECORD +19 -19
  16. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/LICENSE +0 -0
  17. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/WHEEL +0 -0
  18. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/entry_points.txt +0 -0
  19. {openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/top_level.txt +0 -0

openmetadata_managed_apis/api/routes/last_dag_logs.py
@@ -16,7 +16,11 @@ from typing import Callable
 
 from flask import Blueprint, Response, request
 from openmetadata_managed_apis.api.response import ApiResponse
-from openmetadata_managed_apis.api.utils import get_arg_dag_id, get_request_arg
+from openmetadata_managed_apis.api.utils import (
+    get_arg_dag_id,
+    get_request_arg,
+    sanitize_task_id,
+)
 from openmetadata_managed_apis.operations.last_dag_logs import last_dag_logs
 from openmetadata_managed_apis.utils.logger import routes_logger
 
@@ -45,7 +49,15 @@ def get_fn(blueprint: Blueprint) -> Callable:
         """
 
         dag_id = get_arg_dag_id()
-        task_id = get_request_arg(request, "task_id")
+        raw_task_id = get_request_arg(request, "task_id")
+        task_id = sanitize_task_id(raw_task_id)
+
+        if task_id is None:
+            return ApiResponse.error(
+                status=ApiResponse.STATUS_BAD_REQUEST,
+                error="Invalid or missing task_id parameter",
+            )
+
         after = get_request_arg(request, "after", raise_missing=False)
 
         try:

openmetadata_managed_apis/api/utils.py
@@ -53,6 +53,16 @@ def clean_dag_id(raw_dag_id: Optional[str]) -> Optional[str]:
     return re.sub("[^0-9a-zA-Z-_]+", "_", raw_dag_id) if raw_dag_id else None
 
 
+def sanitize_task_id(raw_task_id: Optional[str]) -> Optional[str]:
+    """
+    Sanitize task_id to prevent path traversal attacks.
+    Only allows alphanumeric characters, dashes, and underscores.
+    :param raw_task_id: Raw task ID from user input
+    :return: Sanitized task ID safe for file path construction
+    """
+    return re.sub("[^0-9a-zA-Z-_]+", "_", raw_task_id) if raw_task_id else None
+
+
 def get_request_arg(req, arg, raise_missing: bool = True) -> Optional[str]:
     """
     Pick up the `arg` from the flask `req`.
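
For reference, the new sanitize_task_id helper mirrors the existing clean_dag_id: any run of characters outside 0-9, a-z, A-Z, dash, and underscore collapses to a single underscore, and an empty or missing value yields None (which the route above turns into a 400). A quick sketch of that behavior, with made-up task IDs:

    import re

    def sanitize_task_id(raw_task_id):
        # Same expression as in the diff above
        return re.sub("[^0-9a-zA-Z-_]+", "_", raw_task_id) if raw_task_id else None

    print(sanitize_task_id("dim_address_task"))  # dim_address_task (unchanged)
    print(sanitize_task_id("../../etc/passwd"))  # _etc_passwd (traversal characters collapsed)
    print(sanitize_task_id(None))                # None (route responds with a 400)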

openmetadata_managed_apis/operations/last_dag_logs.py
@@ -1,4 +1,4 @@
-# Copyright 2022 Collate
+# Copyright 2025 Collate
 # Licensed under the Collate Community License, Version 1.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -11,36 +11,67 @@
 """
 Module containing the logic to retrieve all logs from the tasks of a last DAG run
 """
-from functools import partial
+import os
+from functools import lru_cache, partial
 from io import StringIO
-from typing import List, Optional
+from typing import List, Optional, Tuple
 
 from airflow.models import DagModel, TaskInstance
 from airflow.utils.log.log_reader import TaskLogReader
 from flask import Response
 from openmetadata_managed_apis.api.response import ApiResponse
+from openmetadata_managed_apis.utils.logger import operations_logger
+
+logger = operations_logger()
 
 LOG_METADATA = {
     "download_logs": False,
 }
-# Make chunks of 2M characters
 CHUNK_SIZE = 2_000_000
+DOT_STR = "_DOT_"
 
 
-def last_dag_logs(dag_id: str, task_id: str, after: Optional[int] = None) -> Response:
-    """Validate that the DAG is registered by Airflow and have at least one Run.
-
-    If exists, returns all logs for each task instance of the last DAG run.
+@lru_cache(maxsize=10)
+def get_log_file_info(log_file_path: str, mtime: int) -> Tuple[int, int]:
+    """
+    Get total size and number of chunks for a log file.
+    :param log_file_path: Path to log file
+    :param mtime: File modification time in seconds (used as cache key)
+    :return: Tuple of (file_size_bytes, total_chunks)
+    """
+    file_size = os.path.getsize(log_file_path)
+    total_chunks = (file_size + CHUNK_SIZE - 1) // CHUNK_SIZE
+    return file_size, total_chunks
 
-    Args:
-        dag_id (str): DAG to look for
-        task_id (str): Task to fetch logs from
-        after (int): log stream cursor
 
-    Return:
-        Response with log and pagination
+def read_log_chunk_from_file(file_path: str, chunk_index: int) -> Optional[str]:
+    """
+    Read a specific chunk from a log file without loading entire file.
+    :param file_path: Path to the log file
+    :param chunk_index: 0-based chunk index to read
+    :return: Log chunk content or None if error
     """
+    try:
+        offset = chunk_index * CHUNK_SIZE
+        with open(file_path, "r", encoding="utf-8", errors="replace") as f:
+            f.seek(offset)
+            chunk = f.read(CHUNK_SIZE)
+            return chunk
+    except Exception as exc:
+        logger.warning(f"Failed to read log chunk from {file_path}: {exc}")
+        return None
+
 
+def last_dag_logs(dag_id: str, task_id: str, after: Optional[int] = None) -> Response:
+    """
+    Validate that the DAG is registered by Airflow and have at least one Run.
+    If exists, returns all logs for each task instance of the last DAG run.
+    Uses file streaming to avoid loading entire log file into memory.
+    :param dag_id: DAG to look for
+    :param task_id: Task to fetch logs from
+    :param after: log stream cursor
+    :return: Response with log and pagination
+    """
     dag_model = DagModel.get_dagmodel(dag_id=dag_id)
 
     if not dag_model:
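
To make the chunking arithmetic concrete: get_log_file_info rounds the file size up to whole CHUNK_SIZE pages (ceiling division), and read_log_chunk_from_file seeks straight to chunk_index * CHUNK_SIZE instead of reading the file from the start. A small sketch with made-up file sizes:

    CHUNK_SIZE = 2_000_000

    def total_chunks(file_size: int) -> int:
        # Ceiling division, as in get_log_file_info
        return (file_size + CHUNK_SIZE - 1) // CHUNK_SIZE

    print(total_chunks(1_500_000))  # 1 -> single page, no "after" cursor needed
    print(total_chunks(5_000_000))  # 3 -> chunks 0 and 1 are full, chunk 2 holds the tail
    print(2 * CHUNK_SIZE)           # 4000000 -> offset passed to f.seek() for chunk 2

Note that the lru_cache key includes the file's mtime, so the cached size and chunk count are refreshed whenever the log file is rewritten.
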
@@ -58,32 +89,116 @@ def last_dag_logs(dag_id: str, task_id: str, after: Optional[int] = None) -> Response:
             f"Cannot find any task instance for the last DagRun of {dag_id}."
         )
 
-    raw_logs_str = None
-
+    target_task_instance = None
     for task_instance in task_instances:
-        # Only fetch the required logs
         if task_instance.task_id == task_id:
-            # Pick up the _try_number, otherwise they are adding 1
-            try_number = task_instance._try_number  # pylint: disable=protected-access
+            target_task_instance = task_instance
+            break
+
+    if not target_task_instance:
+        return ApiResponse.bad_request(f"Task {task_id} not found in DAG {dag_id}.")
+
+    try_number = target_task_instance._try_number  # pylint: disable=protected-access
+
+    task_log_reader = TaskLogReader()
+    if not task_log_reader.supports_read:
+        return ApiResponse.server_error("Task Log Reader does not support read logs.")
+
+    # Try to use file streaming for better performance
+    try:
+
+        from airflow.configuration import (  # pylint: disable=import-outside-toplevel
+            conf,
+        )
 
-            task_log_reader = TaskLogReader()
-            if not task_log_reader.supports_read:
-                return ApiResponse.server_error(
-                    "Task Log Reader does not support read logs."
+        base_log_folder = conf.get("logging", "base_log_folder")
+        # dag_id and task_id are already sanitized at route level
+        # Only dots are replaced for Airflow log path compatibility
+        dag_id_safe = dag_id.replace(".", DOT_STR)
+        task_id_safe = task_id.replace(".", DOT_STR)
+
+        log_relative_path = f"dag_id={dag_id_safe}/run_id={last_dag_run.run_id}/task_id={task_id_safe}/attempt={try_number}.log"
+        log_file_path = os.path.join(base_log_folder, log_relative_path)
+
+        # Security: Validate the resolved path stays within base_log_folder
+        # to prevent directory traversal attacks. This provides defense-in-depth
+        # even though dag_id and task_id are already sanitized at the route level.
+        log_file_path_real = os.path.realpath(log_file_path)
+        base_log_folder_real = os.path.realpath(base_log_folder)
+
+        if not log_file_path_real.startswith(base_log_folder_real + os.sep):
+            logger.warning(
+                f"Path traversal attempt detected: {log_file_path} is outside {base_log_folder}"
+            )
+            return ApiResponse.bad_request(
+                f"Invalid log path for DAG {dag_id} and Task {task_id}."
+            )
+
+        if os.path.exists(log_file_path_real):
+            stat_info = os.stat(log_file_path_real)
+            file_mtime = int(stat_info.st_mtime)
+
+            _, total_chunks = get_log_file_info(log_file_path_real, file_mtime)
+
+            after_idx = int(after) if after is not None else 0
+
+            if after_idx >= total_chunks:
+                return ApiResponse.bad_request(
+                    f"After index {after} is out of bounds. Total pagination is {total_chunks} for DAG {dag_id} and Task {task_id}."
                 )
 
-            # Even when generating a ton of logs, we just get a single element.
-            # Same happens when trying to call task_log_reader.read_log_chunks
-            # We'll create our own chunk size and paginate based on that
-            raw_logs_str = "".join(
-                list(
-                    task_log_reader.read_log_stream(
-                        ti=task_instance,
-                        try_number=try_number,
-                        metadata=LOG_METADATA,
-                    )
+            chunk_content = read_log_chunk_from_file(log_file_path_real, after_idx)
+
+            if chunk_content is not None:
+                return ApiResponse.success(
+                    {
+                        task_id: chunk_content,
+                        "total": total_chunks,
+                        **(
+                            {"after": after_idx + 1}
+                            if after_idx < total_chunks - 1
+                            else {}
+                        ),
+                    }
                 )
+    except Exception as exc:
+        logger.debug(
+            f"File streaming failed for DAG {dag_id}, falling back to TaskLogReader: {exc}"
+        )
+
+    # Fallback to TaskLogReader if streaming fails
+    return _last_dag_logs_fallback(
+        dag_id, task_id, after, target_task_instance, task_log_reader, try_number
+    )
+
+
+def _last_dag_logs_fallback(
+    dag_id: str,
+    task_id: str,
+    after: Optional[int],
+    task_instance: TaskInstance,
+    task_log_reader: TaskLogReader,
+    try_number: int,
+) -> Response:
+    """
+    Fallback to reading entire log file into memory (old behavior).
+    :param dag_id: DAG to look for
+    :param task_id: Task to fetch logs from
+    :param after: log stream cursor
+    :param task_instance: Task instance to fetch logs from
+    :param task_log_reader: TaskLogReader instance
+    :param try_number: Task attempt number
+    :return: API Response
+    """
+    raw_logs_str = "".join(
+        list(
+            task_log_reader.read_log_stream(
+                ti=task_instance,
+                try_number=try_number,
+                metadata=LOG_METADATA,
             )
+        )
+    )
 
     if not raw_logs_str:
         return ApiResponse.bad_request(
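
On the wire, a successful response now carries the requested chunk under the task_id key, the total chunk count, and an after cursor only while more chunks remain, so a caller can loop until the cursor disappears. A hedged client-side sketch; the base URL is a placeholder for wherever the Airflow webserver exposes this plugin, and the flat response shape is inferred from the ApiResponse.success payload above:

    import requests

    BASE_URL = "http://localhost:8080/api/v1/openmetadata/last_dag_logs"  # deployment-specific

    def fetch_all_chunks(dag_id: str, task_id: str) -> str:
        chunks, after = [], None
        while True:
            params = {"dag_id": dag_id, "task_id": task_id}
            if after is not None:
                params["after"] = after
            payload = requests.get(BASE_URL, params=params, timeout=30).json()
            chunks.append(payload.get(task_id, ""))
            after = payload.get("after")
            if after is None:  # last chunk returned, no cursor left
                return "".join(chunks)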

openmetadata_managed_apis/workflows/ingestion/application.py
@@ -89,6 +89,7 @@ def build_application_workflow_config(
         else None,
         workflowConfig=build_workflow_config_property(ingestion_pipeline),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return application_workflow_config
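
The same one-line addition repeats in every workflow builder that follows: the enableStreamableLogs flag set on the IngestionPipeline is copied into the generated workflow config next to ingestionPipelineFQN. Roughly, the serialized config gains a single field (values below are illustrative only):

    # Illustrative shape only; all other fields are built exactly as before.
    workflow_config_fragment = {
        "ingestionPipelineFQN": "my_service.my_pipeline",  # made-up FQN
        "enableStreamableLogs": True,  # mirrors ingestion_pipeline.enableStreamableLogs
    }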

openmetadata_managed_apis/workflows/ingestion/auto_classification.py
@@ -76,6 +76,7 @@ def build_auto_classification_workflow_config(
             openMetadataServerConfig=ingestion_pipeline.openMetadataServerConnection,
         ),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/common.py
@@ -200,7 +200,6 @@ def execute_workflow(
     Execute the workflow and handle the status
     """
     workflow.execute()
-    workflow.print_status()
     workflow.stop()
     if workflow_config.workflowConfig.raiseOnError:
         workflow.raise_from_status()

openmetadata_managed_apis/workflows/ingestion/dbt.py
@@ -52,6 +52,7 @@ def build_dbt_workflow_config(
         ),
         workflowConfig=build_workflow_config_property(ingestion_pipeline),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/es_reindex.py
@@ -74,6 +74,7 @@ def build_es_reindex_workflow_config(
             openMetadataServerConfig=ingestion_pipeline.openMetadataServerConnection,
         ),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/lineage.py
@@ -47,6 +47,7 @@ def build_lineage_workflow_config(
         ),
         workflowConfig=build_workflow_config_property(ingestion_pipeline),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/metadata.py
@@ -44,6 +44,7 @@ def build_metadata_workflow_config(
         ),
         workflowConfig=build_workflow_config_property(ingestion_pipeline),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/profiler.py
@@ -76,6 +76,7 @@ def build_profiler_workflow_config(
             openMetadataServerConfig=ingestion_pipeline.openMetadataServerConnection,
         ),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/test_suite.py
@@ -77,6 +77,7 @@ def build_test_suite_workflow_config(
             openMetadataServerConfig=ingestion_pipeline.openMetadataServerConnection,
         ),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
     return workflow_config

openmetadata_managed_apis/workflows/ingestion/usage.py
@@ -83,6 +83,7 @@ def build_usage_config_from_file(
         ),
         workflowConfig=build_workflow_config_property(ingestion_pipeline),
         ingestionPipelineFQN=ingestion_pipeline.fullyQualifiedName.root,
+        enableStreamableLogs=ingestion_pipeline.enableStreamableLogs,
     )
 
 

{openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: openmetadata_managed_apis
-Version: 1.8.1.0
+Version: 1.10.5.0
 Summary: Airflow REST APIs to create and manage DAGS
 Author: OpenMetadata Committers
 License: Apache License

{openmetadata_managed_apis-1.8.1.0.dist-info → openmetadata_managed_apis-1.10.5.0.dist-info}/RECORD
@@ -6,7 +6,7 @@ openmetadata_managed_apis/api/app.py,sha256=dKPK8-NvFr3xJdRAEvyaZ_b1ISN3NKmHQEhH
 openmetadata_managed_apis/api/config.py,sha256=mkhl8-jw7_ayNRO9_w7bvxpeJmoMl4ikHcvcayOEZas,1392
 openmetadata_managed_apis/api/error_handlers.py,sha256=N5NrJlRwmhFdLDiy61lzpD2TjWP3ytasmMGV6r0Sxbc,1403
 openmetadata_managed_apis/api/response.py,sha256=tc4Vgd0qmd33eEaBx31r6Gi5KopLEwEcQ8DX9Rd90gE,2768
-openmetadata_managed_apis/api/utils.py,sha256=NfWISKaepOqEEdrGU-Z1q7-3ahlNhBGzSD5yC2hawbk,4995
+openmetadata_managed_apis/api/utils.py,sha256=qePHXsFS8IuRYgA1G_ewW1VJQtdE-TvbsbBOVtxBWcw,5397
 openmetadata_managed_apis/api/routes/__init__.py,sha256=Fx0Abo8-NkK39Y3Xw2XzLnphDM_TiSniZfVL29zHTxw,217
 openmetadata_managed_apis/api/routes/delete.py,sha256=tI-1Y12H_8UNOUQW_p_05KXtcGqzO1rSzPsVVoNopFE,2526
 openmetadata_managed_apis/api/routes/deploy.py,sha256=KvCY36qzNSd2T9jV1I5E_TmlQF-16JMbjmbqN1TTOjI,3243
@@ -16,7 +16,7 @@ openmetadata_managed_apis/api/routes/health.py,sha256=7w1qXN-zKPSbQEq3WOGQJ5SPsH
 openmetadata_managed_apis/api/routes/health_auth.py,sha256=vwfKDZDs_kUefFSZAE-2Apna2SY45ZjlPPhzGACIZ1o,1679
 openmetadata_managed_apis/api/routes/ip.py,sha256=JNB28JfjRiu3__pX1w4XdQqVM5LQ-AJ48CpFDCmZ1wI,2969
 openmetadata_managed_apis/api/routes/kill.py,sha256=hHYJnYonSr2M9br0Hwv6-y5mZa7x_yiTX1g25bkzy98,2178
-openmetadata_managed_apis/api/routes/last_dag_logs.py,sha256=fum-ugonKO4nKtaTep9AZmnURIxOfx6Uh27s68Qkum0,2387
+openmetadata_managed_apis/api/routes/last_dag_logs.py,sha256=v_81JaH626xBK_tRonZmB8i1Lf23E8cumSwMjWrRBd8,2673
 openmetadata_managed_apis/api/routes/run_automation.py,sha256=axlkIk87rmfC-eeoYPkIevJWzQ6FQz93D2UHbiIgRXM,3344
 openmetadata_managed_apis/api/routes/status.py,sha256=6fIWqBXS_77koEdaZQKjyAkdfc5QiYbPatIsq7V2WvQ,2227
 openmetadata_managed_apis/api/routes/trigger.py,sha256=objSIrDpUcKtJLdqwR1PMwRm1LzHGeQpxe4T-0HJhGw,2364
@@ -25,7 +25,7 @@ openmetadata_managed_apis/operations/delete.py,sha256=aduDpBiDcvcOtA606GFZcw7Qxj
 openmetadata_managed_apis/operations/deploy.py,sha256=6g4rqsSDgKSh9rb-xxTTkELxbNT383XKeaEX1MuhH7g,7235
 openmetadata_managed_apis/operations/health.py,sha256=pWyRqAflxsqc3ml2zHddRnWZG6tSyNwDPLdQA8ZspCU,1418
 openmetadata_managed_apis/operations/kill_all.py,sha256=nMRfuc45oZL1G2k9ufBUgmMCTsDu1CmeU0L7dYJE0nY,2340
-openmetadata_managed_apis/operations/last_dag_logs.py,sha256=ioABe_dZBZqmxhK-Ig4ew-S9scdVePYWESw-dO_MnKY,4060
+openmetadata_managed_apis/operations/last_dag_logs.py,sha256=8vaQuHFmHvMJXjeLV6Icy7oJ_cQmhzn-3b98g-fWPSE,8467
 openmetadata_managed_apis/operations/state.py,sha256=6eTL0iS3LcWOJmcLE66HBtyVxw68FH8SLWrxSuv6ZLg,2049
 openmetadata_managed_apis/operations/status.py,sha256=9ZFEgVHggQRDPXLFYZ3eqqHhVl7qg008nPL0VdzUFto,1858
 openmetadata_managed_apis/operations/trigger.py,sha256=KAl6qF6vKpwdAtvns9KXyBQ-DfSb02BLc2_-mly-4FM,1326
@@ -42,21 +42,21 @@ openmetadata_managed_apis/workflows/config.py,sha256=CsltqC736TbpgwjZUM44_StZLuj
 openmetadata_managed_apis/workflows/workflow_builder.py,sha256=ZPaS6bXgQgTSfuXO9y3q8Yn8NxBwnO2Gxiygy2fE8t4,2080
 openmetadata_managed_apis/workflows/workflow_factory.py,sha256=HmyTHQ_WuKBTBQ09vk7InULOQ-01LnwijTAgKU7PPxs,3150
 openmetadata_managed_apis/workflows/ingestion/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-openmetadata_managed_apis/workflows/ingestion/application.py,sha256=tWzI4LX1K1uNouA3cz35Qw77AwmYth2FOGVw8GGUgfY,3925
-openmetadata_managed_apis/workflows/ingestion/auto_classification.py,sha256=NhnGAfEj9MjCu_ANAC4q6o7rQltiIIEqs9DaE5f2Wf0,3071
-openmetadata_managed_apis/workflows/ingestion/common.py,sha256=wNAZurl59_aCuAKV8-H5QQbs1bljW-6LhbiJQrsGAUY,15346
-openmetadata_managed_apis/workflows/ingestion/dbt.py,sha256=Hbl5ikbV_0lyZs85QnrNceKFJeoVChEYu_IkE0EM6tM,2160
+openmetadata_managed_apis/workflows/ingestion/application.py,sha256=xLfAZE2wwg-I9KyamLCcLOR_mk14bcfC5dgDB5HA1o0,3995
+openmetadata_managed_apis/workflows/ingestion/auto_classification.py,sha256=h9k0MwQCZ1tKQAKv1lcxJeEPfJV4J6K1PAahuXg89AU,3141
+openmetadata_managed_apis/workflows/ingestion/common.py,sha256=-wqRvL5bJid94ITkNDrEdfNlqJtNiMF5jHfFT3QSeqM,15318
+openmetadata_managed_apis/workflows/ingestion/dbt.py,sha256=fBi2WiXwxNzma0gKpXXbqcP-Me9tSIZNYWjE-j76CSs,2230
 openmetadata_managed_apis/workflows/ingestion/elasticsearch_sink.py,sha256=blXh5m4mlTNGkshXP-5sWZXlU9aDPutWWEoaG51SWqU,2049
-openmetadata_managed_apis/workflows/ingestion/es_reindex.py,sha256=xi1sCAeFKgW7_Wci1bt6KIjbAGGMDRx6zJ8XHV7t_gw,3423
-openmetadata_managed_apis/workflows/ingestion/lineage.py,sha256=dvMMlSGNi-5h5ZUP92VJxQ4Eefl3P30dHDE26RbkfrI,2147
-openmetadata_managed_apis/workflows/ingestion/metadata.py,sha256=HE0OZSMF_w8Nh0FhvVg9m-X8tAVpe4pkBysEi3XyHMo,2058
-openmetadata_managed_apis/workflows/ingestion/profiler.py,sha256=Kn7KL4MjSbpS2QYyvX-IUWTAzrCnhkOYMhUoGnXI1tM,2957
+openmetadata_managed_apis/workflows/ingestion/es_reindex.py,sha256=-fCbq7dHA2KW2Fe_9HX8vaYS7odIYMpvZFwaxBuw9LY,3493
+openmetadata_managed_apis/workflows/ingestion/lineage.py,sha256=Vgp1THnBniFMiqlwEtfgAYuy11kjZPuNBCFwjRkvQuU,2217
+openmetadata_managed_apis/workflows/ingestion/metadata.py,sha256=-m7duehwM99WWrsnnxkCKeFFz0n1Ofo5UKudFWVtn84,2128
+openmetadata_managed_apis/workflows/ingestion/profiler.py,sha256=-_h73OgZyKum3r0iUbvoP0MtuvwVbbTrY2G1qFiYDAk,3027
 openmetadata_managed_apis/workflows/ingestion/registry.py,sha256=xbB6NdHrxEYUdBHxyrcJkyVGYKhuSI1WoLVvWeWCdz0,2356
-openmetadata_managed_apis/workflows/ingestion/test_suite.py,sha256=bwNxr0vp1yoCi3dXm094-PrUQfg1UJpV95thoPFuUNM,2964
-openmetadata_managed_apis/workflows/ingestion/usage.py,sha256=tr3ap1rU45YHgKnZv1QaZ2npXbWtbgpxSnkfdJqZai8,3719
-openmetadata_managed_apis-1.8.1.0.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-openmetadata_managed_apis-1.8.1.0.dist-info/METADATA,sha256=Axb6GAV9-8uBZWsbycn3Hx_BCAAV3d6OWIZO3jHrVVI,21780
-openmetadata_managed_apis-1.8.1.0.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
-openmetadata_managed_apis-1.8.1.0.dist-info/entry_points.txt,sha256=zknKSL_4J9dZRwSySrmb5sPnSMpmIpZ4_k-Aa4uwUHQ,93
-openmetadata_managed_apis-1.8.1.0.dist-info/top_level.txt,sha256=GygnYc036LyoClYqtk72V3gk7sX5fAKAr9fggnvxNgA,26
-openmetadata_managed_apis-1.8.1.0.dist-info/RECORD,,
+openmetadata_managed_apis/workflows/ingestion/test_suite.py,sha256=mDir-Oz1RgTpQRAeFcwb2rWUlc3tddWO8uQWPv9soe0,3034
+openmetadata_managed_apis/workflows/ingestion/usage.py,sha256=Ate2K4OE0yoa9a8oW3DMivGI09YdHWYk8m2n0f2RucI,3789
+openmetadata_managed_apis-1.10.5.0.dist-info/LICENSE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
+openmetadata_managed_apis-1.10.5.0.dist-info/METADATA,sha256=UslrYLiGKypBNuU25Z09yK8OOsvIbGZ7LFOFKrFJcds,21781
+openmetadata_managed_apis-1.10.5.0.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
+openmetadata_managed_apis-1.10.5.0.dist-info/entry_points.txt,sha256=zknKSL_4J9dZRwSySrmb5sPnSMpmIpZ4_k-Aa4uwUHQ,93
+openmetadata_managed_apis-1.10.5.0.dist-info/top_level.txt,sha256=GygnYc036LyoClYqtk72V3gk7sX5fAKAr9fggnvxNgA,26
+openmetadata_managed_apis-1.10.5.0.dist-info/RECORD,,