apache-airflow-providers-teradata 3.2.1rc1__py3-none-any.whl → 3.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- airflow/providers/teradata/__init__.py +1 -1
- airflow/providers/teradata/get_provider_info.py +14 -1
- airflow/providers/teradata/hooks/bteq.py +15 -17
- airflow/providers/teradata/hooks/teradata.py +25 -8
- airflow/providers/teradata/hooks/ttu.py +1 -1
- airflow/providers/teradata/operators/bteq.py +10 -23
- airflow/providers/teradata/operators/teradata.py +2 -6
- airflow/providers/teradata/operators/teradata_compute_cluster.py +46 -44
- airflow/providers/teradata/transfers/azure_blob_to_teradata.py +2 -2
- airflow/providers/teradata/transfers/s3_to_teradata.py +2 -6
- airflow/providers/teradata/transfers/teradata_to_teradata.py +2 -6
- airflow/providers/teradata/triggers/teradata_compute_cluster.py +6 -7
- airflow/providers/teradata/utils/constants.py +14 -11
- airflow/providers/teradata/version_compat.py +1 -11
- {apache_airflow_providers_teradata-3.2.1rc1.dist-info → apache_airflow_providers_teradata-3.2.3.dist-info}/METADATA +34 -19
- apache_airflow_providers_teradata-3.2.3.dist-info/RECORD +27 -0
- apache_airflow_providers_teradata-3.2.3.dist-info/licenses/NOTICE +5 -0
- apache_airflow_providers_teradata-3.2.1rc1.dist-info/RECORD +0 -26
- {apache_airflow_providers_teradata-3.2.1rc1.dist-info → apache_airflow_providers_teradata-3.2.3.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_teradata-3.2.1rc1.dist-info → apache_airflow_providers_teradata-3.2.3.dist-info}/entry_points.txt +0 -0
- {airflow/providers/teradata → apache_airflow_providers_teradata-3.2.3.dist-info/licenses}/LICENSE +0 -0
--- airflow/providers/teradata/__init__.py
+++ airflow/providers/teradata/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "3.2.1rc1"
+__version__ = "3.2.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
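Note: the version gate above parses `base_version` before comparing, so pre-release suffixes on the installed Airflow do not affect the minimum-version check. A minimal illustration with the `packaging` library (version strings here are illustrative):

    from packaging.version import parse

    # base_version strips pre-release/dev suffixes: "2.10.1rc1" -> "2.10.1"
    assert parse(parse("2.10.1rc1").base_version) >= parse("2.10.0")
    # Without base_version, an rc of the minimum itself would compare below it:
    assert parse("2.10.0rc1") < parse("2.10.0")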
--- airflow/providers/teradata/get_provider_info.py
+++ airflow/providers/teradata/get_provider_info.py
@@ -36,7 +36,20 @@ def get_provider_info():
             ],
             "logo": "/docs/integration-logos/Teradata.png",
             "tags": ["software"],
-        }
+        },
+        {
+            "integration-name": "Bteq",
+            "external-doc-url": "https://www.teradata.com/",
+            "how-to-guide": ["/docs/apache-airflow-providers-teradata/operators/bteq.rst"],
+            "logo": "/docs/integration-logos/Teradata.png",
+            "tags": ["software"],
+        },
+        {
+            "integration-name": "Ttu",
+            "external-doc-url": "https://www.teradata.com/",
+            "logo": "/docs/integration-logos/Teradata.png",
+            "tags": ["software"],
+        },
     ],
     "operators": [
         {
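Note: the two new entries register the Bteq and Ttu integrations in the provider metadata. A sketch of how that structure can be inspected — the per-entry keys are visible in the hunk above, while the top-level "integrations" key is an assumption based on the standard provider-info layout:

    from airflow.providers.teradata.get_provider_info import get_provider_info

    info = get_provider_info()
    names = [i["integration-name"] for i in info["integrations"]]  # key name assumed
    # After this release the list includes "Bteq" and "Ttu" alongside "Teradata".
    print(names)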
--- airflow/providers/teradata/hooks/bteq.py
+++ airflow/providers/teradata/hooks/bteq.py
@@ -37,6 +37,7 @@ from airflow.providers.teradata.utils.bteq_util import (
     verify_bteq_installed,
     verify_bteq_installed_remote,
 )
+from airflow.providers.teradata.utils.constants import Constants
 from airflow.providers.teradata.utils.encryption_utils import (
     decrypt_remote_file_to_string,
     generate_encrypted_file_with_openssl,

@@ -158,7 +159,7 @@ class BteqHook(TtuHook):
             if self.ssh_hook and self.ssh_hook.get_conn():
                 with self.ssh_hook.get_conn() as ssh_client:
                     if ssh_client is None:
-                        raise AirflowException(
+                        raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
                     verify_bteq_installed_remote(ssh_client)
                     password = generate_random_password()  # Encryption/Decryption password
                     encrypted_file_path = os.path.join(tmp_dir, "bteq_script.enc")

@@ -170,7 +171,6 @@ class BteqHook(TtuHook):
                     )
                     remote_encrypted_path = os.path.join(remote_working_dir or "", "bteq_script.enc")
                     remote_encrypted_path = remote_encrypted_path.replace("/", "\\")
-
                     transfer_file_sftp(ssh_client, encrypted_file_path, remote_encrypted_path)
 
                     bteq_command_str = prepare_bteq_command_for_remote_execution(

@@ -204,24 +204,20 @@ class BteqHook(TtuHook):
                         else [bteq_quit_rc if bteq_quit_rc is not None else 0]
                     )
                 ):
-                    raise AirflowException(f"
+                    raise AirflowException(f"Failed to execute BTEQ script : {failure_message}")
                 if failure_message:
                     self.log.warning(failure_message)
                 return exit_status
             else:
-                raise AirflowException(
+                raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
         except (OSError, socket.gaierror):
-            raise AirflowException(
-                "SSH connection timed out. Please check the network or server availability."
-            )
+            raise AirflowException(Constants.BTEQ_REMOTE_ERROR_MSG)
         except SSHException as e:
-            raise AirflowException(f"
+            raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
         except AirflowException as e:
             raise e
         except Exception as e:
-            raise AirflowException(
-                f"An unexpected error occurred while executing BTEQ script on remote machine: {str(e)}"
-            )
+            raise AirflowException(f"{Constants.BTEQ_REMOTE_ERROR_MSG}: {str(e)}")
         finally:
             # Remove the local script file
             if encrypted_file_path and os.path.exists(encrypted_file_path):

@@ -267,7 +263,7 @@ class BteqHook(TtuHook):
             stdout=subprocess.PIPE,
             stderr=subprocess.STDOUT,
             shell=True,
-
+            start_new_session=True,
         )
         encode_bteq_script = bteq_script.encode(str(temp_file_read_encoding or "UTF-8"))
         stdout_data, _ = process.communicate(input=encode_bteq_script)

@@ -276,12 +272,12 @@ class BteqHook(TtuHook):
             process.wait(timeout=timeout + 60)  # Adding 1 minute extra for BTEQ script timeout
         except subprocess.TimeoutExpired:
             self.on_kill()
-            raise AirflowException(
+            raise AirflowException(Constants.BTEQ_TIMEOUT_ERROR_MSG, timeout)
         conn = self.get_conn()
         conn["sp"] = process  # For `on_kill` support
         failure_message = None
         if stdout_data is None:
-            raise AirflowException(
+            raise AirflowException(Constants.BTEQ_UNEXPECTED_ERROR_MSG)
         decoded_line = ""
         for line in stdout_data.splitlines():
             try:

@@ -302,7 +298,7 @@ class BteqHook(TtuHook):
                 else [bteq_quit_rc if bteq_quit_rc is not None else 0]
             )
         ):
-            raise AirflowException(f"
+            raise AirflowException(f"{Constants.BTEQ_UNEXPECTED_ERROR_MSG}: {failure_message}")
         if failure_message:
             self.log.warning(failure_message)
 

@@ -320,7 +316,7 @@ class BteqHook(TtuHook):
                 self.log.warning("Subprocess did not terminate in time. Forcing kill...")
                 process.kill()
         except Exception as e:
-            self.log.error("
+            self.log.error("%s : %s", Constants.BTEQ_UNEXPECTED_ERROR_MSG, str(e))
 
     def get_airflow_home_dir(self) -> str:
         """Get the AIRFLOW_HOME directory."""

@@ -331,7 +327,9 @@ class BteqHook(TtuHook):
         try:
             temp_dir = tempfile.gettempdir()
             if not os.path.isdir(temp_dir) or not os.access(temp_dir, os.W_OK):
-                raise OSError(
+                raise OSError(
+                    f"Failed to execute the BTEQ script due to Temporary directory {temp_dir} is not writable."
+                )
         except Exception:
             temp_dir = self.get_airflow_home_dir()
 
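Note: one behavioral change above is that the local BTEQ subprocess is now launched with start_new_session=True, which detaches it into its own process group/session so a later on_kill can tear down BTEQ together with any children it spawned. A standalone sketch of that pattern on POSIX (the command is a placeholder, not the hook's real bteq invocation):

    import os
    import signal
    import subprocess

    # start_new_session=True puts the child in its own session, so signalling
    # the process group reaches grandchildren too (POSIX-only behavior).
    proc = subprocess.Popen(
        "sleep 300",  # placeholder standing in for the bteq command
        shell=True,
        start_new_session=True,
    )
    os.killpg(os.getpgid(proc.pid), signal.SIGTERM)  # terminate the whole group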
--- airflow/providers/teradata/hooks/teradata.py
+++ airflow/providers/teradata/hooks/teradata.py
@@ -22,8 +22,8 @@ from __future__ import annotations
 import re
 from typing import TYPE_CHECKING, Any
 
-import sqlalchemy
 import teradatasql
+from sqlalchemy.engine import URL
 from teradatasql import TeradataConnection
 
 from airflow.providers.common.sql.hooks.sql import DbApiHook

@@ -34,6 +34,7 @@ if TYPE_CHECKING:
 except ImportError:
     from airflow.models.connection import Connection  # type: ignore[assignment]
 
+DEFAULT_DB_PORT = 1025
 PARAM_TYPES = {bool, float, int, str}
 
 

@@ -166,7 +167,7 @@ class TeradataHook(DbApiHook):
         conn: Connection = self.get_connection(self.get_conn_id())
         conn_config = {
             "host": conn.host or "localhost",
-            "dbs_port": conn.port or 1025,
+            "dbs_port": conn.port or DEFAULT_DB_PORT,
             "database": conn.schema or "",
             "user": conn.login or "dbc",
             "password": conn.password or "dbc",

@@ -195,12 +196,28 @@ class TeradataHook(DbApiHook):
 
         return conn_config
 
-
-
-
-
-
-        return
+    @property
+    def sqlalchemy_url(self) -> URL:
+        """
+        Override to return a Sqlalchemy.engine.URL object from the Teradata connection.
+
+        :return: the extracted sqlalchemy.engine.URL object.
+        """
+        connection = self.get_connection(self.get_conn_id())
+        # Adding only teradatasqlalchemy supported connection parameters.
+        # https://pypi.org/project/teradatasqlalchemy/#ConnectionParameters
+        return URL.create(
+            drivername="teradatasql",
+            username=connection.login,
+            password=connection.password,
+            host=connection.host,
+            port=connection.port,
+            database=connection.schema if connection.schema else None,
+        )
+
+    def get_uri(self) -> str:
+        """Override DbApiHook get_uri method for get_sqlalchemy_engine()."""
+        return self.sqlalchemy_url.render_as_string()
 
     @staticmethod
     def get_ui_field_behaviour() -> dict:
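Note: the new sqlalchemy_url property builds the engine URL with SQLAlchemy's URL.create instead of hand-assembling a string. A standalone sketch of the same call (host, credentials, and database here are made up):

    from sqlalchemy.engine import URL

    url = URL.create(
        drivername="teradatasql",
        username="dbc",
        password="secret",            # hypothetical credentials
        host="teradata.example.com",  # hypothetical host
        port=1025,
        database="my_db",
    )
    # render_as_string() masks the password by default (hide_password=True),
    # which is what the overridden get_uri() above returns.
    print(url.render_as_string())   # teradatasql://dbc:***@teradata.example.com:1025/my_db
    print(url.render_as_string(hide_password=False))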
--- airflow/providers/teradata/hooks/ttu.py
+++ airflow/providers/teradata/hooks/ttu.py
@@ -22,7 +22,7 @@ from abc import ABC
 from typing import Any
 
 from airflow.exceptions import AirflowException
-from airflow.providers.teradata.version_compat import BaseHook
+from airflow.providers.common.compat.sdk import BaseHook
 
 
 class TtuHook(BaseHook, ABC):
--- airflow/providers/teradata/operators/bteq.py
+++ airflow/providers/teradata/operators/bteq.py
@@ -27,19 +27,16 @@ from airflow.providers.teradata.utils.bteq_util import (
     prepare_bteq_script_for_remote_execution,
     read_file,
 )
+from airflow.providers.teradata.utils.constants import Constants
 
 if TYPE_CHECKING:
     from paramiko import SSHClient
 
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        from airflow.utils.context import Context
-
+    from airflow.providers.common.compat.sdk import Context
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.ssh.hooks.ssh import SSHHook
 from airflow.providers.teradata.hooks.bteq import BteqHook
 from airflow.providers.teradata.hooks.teradata import TeradataHook
-from airflow.providers.teradata.version_compat import BaseOperator
 
 
 def contains_template(parameter_value):

@@ -114,9 +111,7 @@ class BteqOperator(BaseOperator):
     def execute(self, context: Context) -> int | None:
         """Execute BTEQ code using the BteqHook."""
         if not self.sql and not self.file_path:
-            raise ValueError(
-                "BteqOperator requires either the 'sql' or 'file_path' parameter. Both are missing."
-            )
+            raise ValueError(Constants.BTEQ_MISSED_PARAMS)
         self._hook = BteqHook(teradata_conn_id=self.teradata_conn_id, ssh_conn_id=self.ssh_conn_id)
         self._ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id) if self.ssh_conn_id else None

@@ -159,15 +154,13 @@ class BteqOperator(BaseOperator):
         )
         if self.file_path:
             if not is_valid_file(self.file_path):
-                raise ValueError(
-                    f"The provided file path '{self.file_path}' is invalid or does not exist."
-                )
+                raise ValueError(Constants.BTEQ_INVALID_PATH % self.file_path)
             try:
                 is_valid_encoding(self.file_path, self.temp_file_read_encoding or "UTF-8")
             except UnicodeDecodeError as e:
-                errmsg =
+                errmsg = Constants.BTEQ_INVALID_CHARSET % (self.file_path, "UTF-8")
                 if self.bteq_script_encoding:
-                    errmsg =
+                    errmsg = Constants.BTEQ_INVALID_CHARSET % (self.file_path, self.bteq_script_encoding)
                 raise ValueError(errmsg) from e
             return self._handle_local_bteq_file(
                 file_path=self.file_path,

@@ -200,13 +193,9 @@ class BteqOperator(BaseOperator):
                 file_path=self.file_path,
                 context=context,
             )
-            raise ValueError(
-                f"The provided remote file path '{self.file_path}' is invalid or file does not exist on remote machine at given path."
-            )
+            raise ValueError(Constants.BTEQ_REMOTE_FILE_PATH_INVALID % self.file_path)
         else:
-            raise ValueError(
-                "BteqOperator requires either the 'sql' or 'file_path' parameter. Both are missing."
-            )
+            raise ValueError(Constants.BTEQ_MISSED_PARAMS)
         return None
 
     def _handle_remote_bteq_file(

@@ -242,9 +231,7 @@ class BteqOperator(BaseOperator):
                 self.temp_file_read_encoding,
             )
             return None
-        raise ValueError(
-            "Please provide a valid file path for the BTEQ script to be executed on the remote machine."
-        )
+        raise ValueError(Constants.BTEQ_MISSED_PARAMS)
 
     def _handle_local_bteq_file(
         self,
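Note: the operator consolidated above validates that exactly one of sql or file_path is supplied and then runs through BteqHook, locally or over SSH. A hypothetical task definition using the parameter names visible in this diff (the connection IDs are made up):

    from airflow.providers.teradata.operators.bteq import BteqOperator

    run_bteq = BteqOperator(
        task_id="run_bteq",
        sql="SELECT COUNT(*) FROM my_db.my_table;",  # or file_path="/path/to/script.bteq"
        teradata_conn_id="teradata_default",         # hypothetical connection IDs
        ssh_conn_id="ssh_default",                   # optional: execute on a remote host
    )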
--- airflow/providers/teradata/operators/teradata.py
+++ airflow/providers/teradata/operators/teradata.py
@@ -20,16 +20,12 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, ClassVar
 
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
-from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class TeradataOperator(SQLExecuteQueryOperator):
--- airflow/providers/teradata/operators/teradata_compute_cluster.py
+++ airflow/providers/teradata/operators/teradata_compute_cluster.py
@@ -23,16 +23,12 @@ from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
 from airflow.providers.teradata.utils.constants import Constants
-from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 from collections.abc import Sequence
 from datetime import timedelta

@@ -41,11 +37,7 @@ from typing import TYPE_CHECKING, Any, cast
 from airflow.providers.teradata.triggers.teradata_compute_cluster import TeradataComputeClusterSyncTrigger
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 from airflow.exceptions import AirflowException

@@ -125,34 +117,40 @@ class _TeradataComputeClusterOperator(BaseOperator):
         """
         self._compute_cluster_execute_complete(event)
 
-    def _compute_cluster_execute(self):
+    def _compute_cluster_execute(self, operation: str | None = None):
         # Verifies the provided compute profile name.
         if (
             self.compute_profile_name is None
             or self.compute_profile_name == "None"
             or self.compute_profile_name == ""
         ):
-
-
-
-
-
-
-
+            raise AirflowException(Constants.CC_OPR_EMPTY_PROFILE_ERROR_MSG % operation)
+        try:
+            # Verifies if the provided Teradata instance belongs to Vantage Cloud Lake.
+            lake_support_find_sql = "SELECT count(1) from DBC.StorageV WHERE StorageName='TD_OFSSTORAGE'"
+            lake_support_result = self.hook.run(lake_support_find_sql, handler=_single_result_row_handler)
+            if lake_support_result is None:
+                raise AirflowException(Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG % operation)
+        except Exception:
+            raise AirflowException(Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG % operation)
         # Getting teradata db version. Considering teradata instance is Lake when db version is 20 or above
         db_version_get_sql = "SELECT InfoData AS Version FROM DBC.DBCInfoV WHERE InfoKey = 'VERSION'"
         try:
             db_version_result = self.hook.run(db_version_get_sql, handler=_single_result_row_handler)
             if db_version_result is not None:
-
-
+                # Safely extract the actual version string from the result
+                if isinstance(db_version_result, (list, tuple)) and db_version_result:
+                    # e.g., if it's a tuple like ('17.10',), get the first element
+                    version_str = str(db_version_result[0])
+                else:
+                    version_str = str(db_version_result)  # fallback, should be rare
+                db_version = version_str.split(".")[0]
                 if db_version is not None and int(db_version) < 20:
-                    raise AirflowException(Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG)
+                    raise AirflowException(Constants.CC_GRP_LAKE_SUPPORT_ONLY_MSG % operation)
             else:
-                raise AirflowException(
-            except Exception
-
-                raise AirflowException("Error occurred while getting teradata database version")
+                raise AirflowException(Constants.CC_ERR_VERSION_GET)
+        except Exception:
+            raise AirflowException(Constants.CC_ERR_VERSION_GET)
 
     def _compute_cluster_execute_complete(self, event: dict[str, Any]) -> None:
         if event["status"] == "success":

@@ -270,8 +268,8 @@ class TeradataComputeClusterProvisionOperator(_TeradataComputeClusterOperator):
         """
         return self._compute_cluster_execute()
 
-    def _compute_cluster_execute(self):
-        super()._compute_cluster_execute()
+    def _compute_cluster_execute(self, operation: str | None = None):
+        super()._compute_cluster_execute("Provision")
         if self.compute_group_name:
             cg_status_query = (
                 "SELECT count(1) FROM DBC.ComputeGroups WHERE UPPER(ComputeGroupName) = UPPER('"

@@ -300,7 +298,8 @@ class TeradataComputeClusterProvisionOperator(_TeradataComputeClusterOperator):
             cp_status_result = self._hook_run(cp_status_query, handler=_single_result_row_handler)
             if cp_status_result is not None:
                 cp_status_result = str(cp_status_result)
-                msg = f"Compute Profile {self.compute_profile_name}
+                msg = f"Compute Profile '{self.compute_profile_name}' already exists under Compute Group '{self.compute_group_name}'. Status: {cp_status_result}."
+
                 self.log.info(msg)
                 return cp_status_result
         create_cp_query = self._build_ccp_setup_query()

@@ -357,21 +356,22 @@ class TeradataComputeClusterDecommissionOperator(_TeradataComputeClusterOperator):
         """
         return self._compute_cluster_execute()
 
-    def _compute_cluster_execute(self):
-        super()._compute_cluster_execute()
+    def _compute_cluster_execute(self, operation: str | None = None):
+        super()._compute_cluster_execute("Decommission")
         cp_drop_query = "DROP COMPUTE PROFILE " + self.compute_profile_name
         if self.compute_group_name:
             cp_drop_query = cp_drop_query + " IN COMPUTE GROUP " + self.compute_group_name
         self._hook_run(cp_drop_query, handler=_single_result_row_handler)
         self.log.info(
-            "Compute Profile %s
+            "Compute Profile %s in Compute Group %s is successfully dropped.",
             self.compute_profile_name,
             self.compute_group_name,
         )
-
+
+        if self.delete_compute_group and self.compute_group_name:
             cg_drop_query = "DROP COMPUTE GROUP " + self.compute_group_name
             self._hook_run(cg_drop_query, handler=_single_result_row_handler)
-            self.log.info("Compute Group %s is successfully dropped", self.compute_group_name)
+            self.log.info("Compute Group %s is successfully dropped.", self.compute_group_name)
 
 
 class TeradataComputeClusterResumeOperator(_TeradataComputeClusterOperator):

@@ -417,8 +417,8 @@ class TeradataComputeClusterResumeOperator(_TeradataComputeClusterOperator):
         """
         return self._compute_cluster_execute()
 
-    def _compute_cluster_execute(self):
-        super()._compute_cluster_execute()
+    def _compute_cluster_execute(self, operation: str | None = None):
+        super()._compute_cluster_execute("Resume")
         cc_status_query = (
             "SEL ComputeProfileState FROM DBC.ComputeProfilesVX WHERE UPPER(ComputeProfileName) = UPPER('"
             + self.compute_profile_name

@@ -432,15 +432,16 @@ class TeradataComputeClusterResumeOperator(_TeradataComputeClusterOperator):
         # Generates an error message if the compute cluster does not exist for the specified
         # compute profile and compute group.
         else:
-
-            raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG)
+            raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG % operation)
         if cp_status_result != Constants.CC_RESUME_DB_STATUS:
             cp_resume_query = f"RESUME COMPUTE FOR COMPUTE PROFILE {self.compute_profile_name}"
             if self.compute_group_name:
                 cp_resume_query = f"{cp_resume_query} IN COMPUTE GROUP {self.compute_group_name}"
             return self._handle_cc_status(Constants.CC_RESUME_OPR, cp_resume_query)
         self.log.info(
-            "Compute Cluster %s already %s
+            "Compute Cluster %s is already in '%s' status.",
+            self.compute_profile_name,
+            Constants.CC_RESUME_DB_STATUS,
         )
 
 

@@ -487,8 +488,8 @@ class TeradataComputeClusterSuspendOperator(_TeradataComputeClusterOperator):
         """
         return self._compute_cluster_execute()
 
-    def _compute_cluster_execute(self):
-        super()._compute_cluster_execute()
+    def _compute_cluster_execute(self, operation: str | None = None):
+        super()._compute_cluster_execute("Suspend")
         sql = (
             "SEL ComputeProfileState FROM DBC.ComputeProfilesVX WHERE UPPER(ComputeProfileName) = UPPER('"
             + self.compute_profile_name

@@ -502,13 +503,14 @@ class TeradataComputeClusterSuspendOperator(_TeradataComputeClusterOperator):
         # Generates an error message if the compute cluster does not exist for the specified
         # compute profile and compute group.
         else:
-
-            raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG)
+            raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG % operation)
         if result != Constants.CC_SUSPEND_DB_STATUS:
             sql = f"SUSPEND COMPUTE FOR COMPUTE PROFILE {self.compute_profile_name}"
             if self.compute_group_name:
                 sql = f"{sql} IN COMPUTE GROUP {self.compute_group_name}"
             return self._handle_cc_status(Constants.CC_SUSPEND_OPR, sql)
         self.log.info(
-            "Compute Cluster %s already %s
+            "Compute Cluster %s is already in '%s' status.",
+            self.compute_profile_name,
+            Constants.CC_SUSPEND_DB_STATUS,
         )
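Note: the refactor threads an operation label ("Provision", "Decommission", "Resume", "Suspend") from each subclass into the shared validation in _compute_cluster_execute, so every Constants error template can name the failing operation. A stripped-down sketch of the pattern — class names and the constant are illustrative stand-ins, not the provider's real classes:

    class _Base:
        ERROR = "Failed to %s the compute cluster."  # stand-in for the Constants templates
        profile: str | None = None

        def _execute(self, operation: str | None = None) -> None:
            if not self.profile:
                raise ValueError(self.ERROR % operation)

    class Resume(_Base):
        def _execute(self, operation: str | None = None) -> None:
            super()._execute("Resume")  # each subclass pins its own label

    try:
        Resume()._execute()
    except ValueError as err:
        print(err)  # Failed to Resume the compute cluster.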
--- airflow/providers/teradata/transfers/azure_blob_to_teradata.py
+++ airflow/providers/teradata/transfers/azure_blob_to_teradata.py
@@ -28,11 +28,11 @@ except ModuleNotFoundError as e:
 
     raise AirflowOptionalProviderFeatureException(e)
 
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
-from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
-    from airflow.
+    from airflow.providers.common.compat.sdk import Context
 
 
 class AzureBlobStorageToTeradataOperator(BaseOperator):
--- airflow/providers/teradata/transfers/s3_to_teradata.py
+++ airflow/providers/teradata/transfers/s3_to_teradata.py
@@ -27,15 +27,11 @@ except ModuleNotFoundError as e:
     from airflow.exceptions import AirflowOptionalProviderFeatureException
 
     raise AirflowOptionalProviderFeatureException(e)
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
-from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class S3ToTeradataOperator(BaseOperator):
--- airflow/providers/teradata/transfers/teradata_to_teradata.py
+++ airflow/providers/teradata/transfers/teradata_to_teradata.py
@@ -21,15 +21,11 @@ from collections.abc import Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING
 
+from airflow.providers.common.compat.sdk import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
-from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
-    try:
-        from airflow.sdk.definitions.context import Context
-    except ImportError:
-        # TODO: Remove once provider drops support for Airflow 2
-        from airflow.utils.context import Context
+    from airflow.providers.common.compat.sdk import Context
 
 
 class TeradataToTeradataOperator(BaseOperator):
--- airflow/providers/teradata/triggers/teradata_compute_cluster.py
+++ airflow/providers/teradata/triggers/teradata_compute_cluster.py
@@ -73,8 +73,7 @@ class TeradataComputeClusterSyncTrigger(BaseTrigger):
         while True:
             status = await self.get_status()
             if status is None or len(status) == 0:
-
-                raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG)
+                raise AirflowException(Constants.CC_GRP_PRP_NON_EXISTS_MSG % "manage")
             if (
                 self.operation_type == Constants.CC_SUSPEND_OPR
                 or self.operation_type == Constants.CC_CREATE_SUSPEND_OPR

@@ -108,8 +107,8 @@ class TeradataComputeClusterSyncTrigger(BaseTrigger):
                 yield TriggerEvent(
                     {
                         "status": "error",
-                        "message": Constants.
-                        % (self.
+                        "message": Constants.CC_OPR_TIMEOUT_ERROR
+                        % (self.operation_type, self.compute_profile_name),
                     }
                 )
             elif (

@@ -128,8 +127,8 @@ class TeradataComputeClusterSyncTrigger(BaseTrigger):
                 yield TriggerEvent(
                     {
                         "status": "error",
-                        "message": Constants.
-                        % (self.
+                        "message": Constants.CC_OPR_TIMEOUT_ERROR
+                        % (self.operation_type, self.compute_profile_name),
                     }
                 )
             else:

@@ -137,7 +136,7 @@ class TeradataComputeClusterSyncTrigger(BaseTrigger):
         except Exception as e:
             yield TriggerEvent({"status": "error", "message": str(e)})
         except asyncio.CancelledError:
-            self.log.error(Constants.CC_OPR_TIMEOUT_ERROR, self.operation_type)
+            self.log.error(Constants.CC_OPR_TIMEOUT_ERROR, self.operation_type, self.compute_profile_name)
 
     async def get_status(self) -> str:
         """Return compute cluster SUSPEND/RESUME operation status."""
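Note: the log call above now passes compute_profile_name as an extra positional argument because CC_OPR_TIMEOUT_ERROR contains two %s placeholders; the logging module defers the %-interpolation until the record is actually emitted. A small illustration (the template literal is copied from the constants hunk below):

    import logging

    log = logging.getLogger("example")
    template = (
        "Failed to %s the Vantage Cloud Lake Compute Cluster Instance `%s`. "
        "Please contact the administrator for assistance."
    )
    # Lazy formatting: the args are interpolated only if the record is emitted.
    log.error(template, "Suspend", "my_profile")  # profile name is illustrative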
--- airflow/providers/teradata/utils/constants.py
+++ airflow/providers/teradata/utils/constants.py
@@ -29,18 +29,21 @@ class Constants:
     CC_SUSPEND_DB_STATUS = "Suspended"
     CC_RESUME_DB_STATUS = "Running"
     CC_OPR_SUCCESS_STATUS_MSG = "Compute Cluster %s %s operation completed successfully."
-
-
-
-
-
-
-
-        "There is an issue with the %s operation. Kindly consult the administrator for assistance."
+    CC_OPR_EMPTY_PROFILE_ERROR_MSG = "Failed to %s the Vantage Cloud Lake Compute Cluster Instance due to an invalid compute cluster profile name."
+    CC_GRP_PRP_NON_EXISTS_MSG = "Failed to %s the Vantage Cloud Lake Compute Cluster Instance because the specified compute cluster does not exist or the user lacks the necessary permissions to access the Compute Cluster Instance."
+    CC_GRP_LAKE_SUPPORT_ONLY_MSG = "Failed to %s the Vantage Cloud Lake Compute Cluster Instance because the Compute Cluster feature is supported only on the Vantage Cloud Lake system."
+    CC_OPR_TIMEOUT_ERROR = "Failed to %s the Vantage Cloud Lake Compute Cluster Instance `%s`. Please contact the administrator for assistance."
+    CC_ERR_VERSION_GET = "Failed to manage the Vantage Cloud Lake Compute Cluster Instance due to an error while getting the Teradata database version."
+    BTEQ_REMOTE_ERROR_MSG = (
+        "Failed to establish a SSH connection to the remote machine for executing the BTEQ script."
     )
-
-
-
+    BTEQ_UNEXPECTED_ERROR_MSG = "Failure while executing BTEQ script due to unexpected error."
+    BTEQ_TIMEOUT_ERROR_MSG = "Failed to execute BTEQ script due to timeout after %s seconds."
+    BTEQ_MISSED_PARAMS = "Failed to execute BTEQ script due to missing required parameters: either 'sql' or 'file_path' must be provided."
+    BTEQ_INVALID_PATH = (
+        "Failed to execute BTEQ script due to invalid file path: '%s' does not exist or is inaccessible."
     )
+    BTEQ_INVALID_CHARSET = "Failed to execute BTEQ script because the provided file '%s' encoding differs from the specified BTEQ I/O encoding %s"
+    BTEQ_REMOTE_FILE_PATH_INVALID = "Failed to execute BTEQ script due to invalid remote file path: '%s' does not exist or is inaccessible on the remote machine."
     CC_OPR_TIME_OUT = 1200
     CC_POLL_INTERVAL = 60
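Note: all the new templates are %-style. Call sites in this release format them eagerly with the % operator at raise sites (the trigger's logger, shown earlier, formats lazily instead). A quick demonstration, reusing two single-placeholder templates from the hunk above:

    class Constants:
        BTEQ_INVALID_PATH = (
            "Failed to execute BTEQ script due to invalid file path: '%s' does not exist or is inaccessible."
        )
        CC_GRP_PRP_NON_EXISTS_MSG = "Failed to %s the Vantage Cloud Lake Compute Cluster Instance because the specified compute cluster does not exist or the user lacks the necessary permissions to access the Compute Cluster Instance."

    print(Constants.BTEQ_INVALID_PATH % "/tmp/missing.bteq")  # path is illustrative
    print(Constants.CC_GRP_PRP_NON_EXISTS_MSG % "Resume")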
--- airflow/providers/teradata/version_compat.py
+++ airflow/providers/teradata/version_compat.py
@@ -35,14 +35,4 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 AIRFLOW_V_3_1_PLUS: bool = get_base_airflow_version_tuple() >= (3, 1, 0)
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseHook
-else:
-    from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator
-else:
-    from airflow.models import BaseOperator
-
-__all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS", "BaseHook", "BaseOperator"]
+__all__ = ["AIRFLOW_V_3_0_PLUS", "AIRFLOW_V_3_1_PLUS"]
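Note: taken together with the per-module hunks above, the provider drops its own version_compat shims for BaseHook/BaseOperator/Context and relies on apache-airflow-providers-common-compat (>=1.8.0 per the METADATA changes below), which centralizes the Airflow 2/3 fallback. The new canonical import used across the package:

    # Replaces the per-provider AIRFLOW_V_3_0_PLUS conditional imports removed above.
    from airflow.providers.common.compat.sdk import BaseHook, BaseOperator, Context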
--- apache_airflow_providers_teradata-3.2.1rc1.dist-info/METADATA
+++ apache_airflow_providers_teradata-3.2.3.dist-info/METADATA
@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-teradata
-Version: 3.2.1rc1
+Version: 3.2.3
 Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
 Keywords: airflow-provider,teradata,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment

@@ -14,22 +15,24 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-
-
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.10.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
+Requires-Dist: apache-airflow-providers-common-sql>=1.20.0
 Requires-Dist: teradatasqlalchemy>=17.20.0.0
 Requires-Dist: teradatasql>=17.20.0.28
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
 Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.
-Project-URL: Documentation, https://airflow.
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow

@@ -63,9 +66,8 @@ Provides-Extra: ssh
 
 Package ``apache-airflow-providers-teradata``
 
-Release: ``3.2.1rc1``
+Release: ``3.2.3``
 
-Release Date: ``|PypiReleaseDate|``
 
 `Teradata <https://www.teradata.com/>`__
 

@@ -77,12 +79,12 @@ This is a provider package for ``teradata`` provider. All classes for this provi
 are in ``airflow.providers.teradata`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.3/>`_.
 
 Installation
 ------------
 
-You can install this package on top of an existing Airflow
+You can install this package on top of an existing Airflow installation (see ``Requirements`` below
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-teradata``
 

@@ -91,14 +93,15 @@ The package supports the following python versions: 3.10,3.11,3.12,3.13
 Requirements
 ------------
 
-
-PIP package
-
-``apache-airflow``
-``apache-airflow-providers-common-
-``
-``
-
+=========================================== ==================
+PIP package                                 Version required
+=========================================== ==================
+``apache-airflow``                          ``>=2.10.0``
+``apache-airflow-providers-common-compat``  ``>=1.8.0``
+``apache-airflow-providers-common-sql``     ``>=1.20.0``
+``teradatasqlalchemy``                      ``>=17.20.0.0``
+``teradatasql``                             ``>=17.20.0.28``
+=========================================== ==================
 
 Cross provider package dependencies
 -----------------------------------

@@ -117,11 +120,23 @@ You can install such cross-provider dependencies when installing from PyPI. For
 Dependent package                                                                                                      Extra
 ====================================================================================================================== ===================
 `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_                   ``amazon``
+`apache-airflow-providers-common-compat <https://airflow.apache.org/docs/apache-airflow-providers-common-compat>`_     ``common.compat``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_           ``common.sql``
 `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
 `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_                         ``ssh``
 ====================================================================================================================== ===================
 
+Optional dependencies
+---------------------
+
+=================== ============================================
+Extra               Dependencies
+=================== ============================================
+``microsoft.azure`` ``apache-airflow-providers-microsoft-azure``
+``amazon``          ``apache-airflow-providers-amazon``
+``ssh``             ``apache-airflow-providers-ssh``
+=================== ============================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.3/changelog.html>`_.
--- /dev/null
+++ apache_airflow_providers_teradata-3.2.3.dist-info/RECORD
@@ -0,0 +1,27 @@
+airflow/providers/teradata/__init__.py,sha256=qjJ9Qyz4bJN2cEDbUkvumx6pUztx92At8JG0f66UwNQ,1497
+airflow/providers/teradata/get_provider_info.py,sha256=NNVCUdS1bDvLBLUEDVe8cFP-eGnW4TgmZMgErIN-oUc,4607
+airflow/providers/teradata/version_compat.py,sha256=cmeoGMcTp0kiFa3GKZlQh31_kMk7UX9nOX8sYyGtuFg,1665
+airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/teradata/hooks/bteq.py,sha256=lAE63jZFgWLlqGkTGxRFkMErdQUgy48Bbi0vegRmdPg,15007
+airflow/providers/teradata/hooks/teradata.py,sha256=k_u9iJsuwjr7-jdBkEGd_m4yjVfWwDrV0pXrbjUrxPE,11572
+airflow/providers/teradata/hooks/ttu.py,sha256=Mc-CFcF_v6McbkzhP8gMWWMmjNCwFuVduqoDu98_8ow,3689
+airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/teradata/operators/bteq.py,sha256=6MZpHoJwK_g_UFWjL0hTgCyei4LLl6PygU15ZZiytqs,12656
+airflow/providers/teradata/operators/teradata.py,sha256=7GIkQZWaxAyMYX7srcD6MT1GvhfGSLaiAZUfYX-BpN0,3719
+airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=X3j4xI63PnH2rkyxFvO7ebjuRAyc6pxl-RAVec9CVII,22033
+airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/teradata/transfers/azure_blob_to_teradata.py,sha256=mwvqDLPLOFH-9R6Uj-MjUdsRJ2lOAFVlCuAvI9u-AZY,5686
+airflow/providers/teradata/transfers/s3_to_teradata.py,sha256=vzxOAjq8_uxMxXqtqCrA3JVy1LlY4a-XkK0qheNZ_to,5546
+airflow/providers/teradata/transfers/teradata_to_teradata.py,sha256=Gf5t90sUf4ZgUVYMMA6qSHk_nob2ZQkWoGNV8oeoVSs,3891
+airflow/providers/teradata/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/teradata/triggers/teradata_compute_cluster.py,sha256=a8L-S2R5hk4DSmTy1NvMKYx5n27DqMzR6t0kKv1jrBU,6944
+airflow/providers/teradata/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/teradata/utils/bteq_util.py,sha256=GCIPc1PCIOC-YWwxgYHr-N1d7X8ZGPU5Lmax_JZY6rA,7360
+airflow/providers/teradata/utils/constants.py,sha256=daNhA6ixICQi2lqliSXHezBEVcibL_kOBq6hOZTCGdM,3090
+airflow/providers/teradata/utils/encryption_utils.py,sha256=ARGWmgBbvSq6_MQHfTevvfvHjaiBjQI62UXltDcJLJo,2578
+apache_airflow_providers_teradata-3.2.3.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
+apache_airflow_providers_teradata-3.2.3.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_teradata-3.2.3.dist-info/licenses/NOTICE,sha256=E3-_E02gwwSEFzeeWPKmnIjOoos3hW28CLISV6sYrbQ,168
+apache_airflow_providers_teradata-3.2.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_teradata-3.2.3.dist-info/METADATA,sha256=C3Ev9_HeUdD5oejacmaCpT3cczjWv-gNiDq55VVm6Mg,6741
+apache_airflow_providers_teradata-3.2.3.dist-info/RECORD,,
--- apache_airflow_providers_teradata-3.2.1rc1.dist-info/RECORD
+++ /dev/null
@@ -1,26 +0,0 @@
-airflow/providers/teradata/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/teradata/__init__.py,sha256=TbCy6Rbt5qrxNqFXgDDD3TeFNMaUWAbjFkJ28gnOVj0,1497
-airflow/providers/teradata/get_provider_info.py,sha256=SF-3YIl3CCi6mN6b9EEqLkJNyuMIM35CvY_H91QdELw,4031
-airflow/providers/teradata/version_compat.py,sha256=rSCzUYsqEbM9-qivzLBYCnll2FFVAvk8LyMZvUi_sNE,1958
-airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/teradata/hooks/bteq.py,sha256=9N9z2NbouKbwYiikCbMkVAobvgpRvHgN4uzChQ5c8GU,15067
-airflow/providers/teradata/hooks/teradata.py,sha256=fOt3ZriM-rB27d-7RWhoEBZJyZcNkHowrSh4hqiNNi8,10964
-airflow/providers/teradata/hooks/ttu.py,sha256=dn8_KFkCg_qTkoCxdyxpZUJM2fEMInhlX6UW_D7dCLY,3695
-airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/teradata/operators/bteq.py,sha256=w7CysOA1ioVvTLhPsj_g3paCM8YH0Yh-wUJLKRJsLZk,13223
-airflow/providers/teradata/operators/teradata.py,sha256=G8Vgu2g-lyX7lisdEZOafIFJEx-eeeM9bgNI4fDKnq0,3873
-airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=30v9PCBDUlkAKVqxsz4ctcai709h-fZGomUW0h8QmXU,21829
-airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/teradata/transfers/azure_blob_to_teradata.py,sha256=XnMsQSLS-kQP7aBRer3bLro8qgNohcJ3H63c2YIjYMA,5678
-airflow/providers/teradata/transfers/s3_to_teradata.py,sha256=-YskshSjx-qVniuSIHIQc4qJmfpxvhqiiy2zvXDzewk,5700
-airflow/providers/teradata/transfers/teradata_to_teradata.py,sha256=J6ibOly6vjzS4Vwf0oLOL5pILCoyI8q9wDwpVP9efV4,4045
-airflow/providers/teradata/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/teradata/triggers/teradata_compute_cluster.py,sha256=hjMTnOpqlbByTtmNdJ9usK7hilEAz4tFXpJoENgFhyo,6987
-airflow/providers/teradata/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/teradata/utils/bteq_util.py,sha256=GCIPc1PCIOC-YWwxgYHr-N1d7X8ZGPU5Lmax_JZY6rA,7360
-airflow/providers/teradata/utils/constants.py,sha256=ro1FVNsAakal8_uX27aN0DTVO0T9FG4fv9HzBIY2I-w,2253
-airflow/providers/teradata/utils/encryption_utils.py,sha256=ARGWmgBbvSq6_MQHfTevvfvHjaiBjQI62UXltDcJLJo,2578
-apache_airflow_providers_teradata-3.2.1rc1.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
-apache_airflow_providers_teradata-3.2.1rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_teradata-3.2.1rc1.dist-info/METADATA,sha256=MrVA9qbumPrk_c2MRss-EJwFsxl62QKUGxRLJpsw92s,6056
-apache_airflow_providers_teradata-3.2.1rc1.dist-info/RECORD,,
{apache_airflow_providers_teradata-3.2.1rc1.dist-info → apache_airflow_providers_teradata-3.2.3.dist-info}/WHEEL
    File without changes

{apache_airflow_providers_teradata-3.2.1rc1.dist-info → apache_airflow_providers_teradata-3.2.3.dist-info}/entry_points.txt
    File without changes

{airflow/providers/teradata → apache_airflow_providers_teradata-3.2.3.dist-info/licenses}/LICENSE
RENAMED
    File without changes