apache-airflow-providers-teradata 3.1.0rc1__py3-none-any.whl → 3.2.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
- __version__ = "3.1.0"
+ __version__ = "3.2.0"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
@@ -45,10 +45,13 @@ def get_provider_info():
                 "airflow.providers.teradata.operators.teradata",
                 "airflow.providers.teradata.operators.teradata_compute_cluster",
             ],
-         }
+         },
+         {"integration-name": "Bteq", "python-modules": ["airflow.providers.teradata.operators.bteq"]},
     ],
     "hooks": [
-         {"integration-name": "Teradata", "python-modules": ["airflow.providers.teradata.hooks.teradata"]}
+         {"integration-name": "Teradata", "python-modules": ["airflow.providers.teradata.hooks.teradata"]},
+         {"integration-name": "Ttu", "python-modules": ["airflow.providers.teradata.hooks.ttu"]},
+         {"integration-name": "Bteq", "python-modules": ["airflow.providers.teradata.hooks.bteq"]},
     ],
     "transfers": [
         {
@@ -0,0 +1,339 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ from __future__ import annotations
+
+ import os
+ import socket
+ import subprocess
+ import tempfile
+ from contextlib import contextmanager
+
+ from paramiko import SSHException
+
+ from airflow.exceptions import AirflowException
+ from airflow.providers.ssh.hooks.ssh import SSHHook
+ from airflow.providers.teradata.hooks.ttu import TtuHook
+ from airflow.providers.teradata.utils.bteq_util import (
+     get_remote_tmp_dir,
+     identify_os,
+     prepare_bteq_command_for_local_execution,
+     prepare_bteq_command_for_remote_execution,
+     transfer_file_sftp,
+     verify_bteq_installed,
+     verify_bteq_installed_remote,
+ )
+ from airflow.providers.teradata.utils.encryption_utils import (
+     decrypt_remote_file_to_string,
+     generate_encrypted_file_with_openssl,
+     generate_random_password,
+ )
+
+
+ class BteqHook(TtuHook):
+     """
+     Hook for executing BTEQ (Basic Teradata Query) scripts.
+
+     This hook provides functionality to execute BTEQ scripts either locally or remotely via SSH.
+     It extends the `TtuHook` and integrates with Airflow's SSHHook for remote execution.
+
+     BTEQ scripts are used to interact with Teradata databases, allowing users to perform
+     operations such as querying, data manipulation, and administrative tasks.
+
+     Features:
+     - Supports both local and remote execution of BTEQ scripts.
+     - Handles connection details, script preparation, and execution.
+     - Provides robust error handling and logging for debugging.
+     - Allows configuration of session parameters such as output width and encoding.
+
+     .. seealso::
+         - :ref:`hook API connection <howto/connection:teradata>`
+
+     :param bteq_script: The BTEQ script to be executed, as a string containing the BTEQ commands.
+     :param remote_working_dir: Temporary directory on the remote host (via SSH) to which the BTEQ script is transferred and where it is executed. Defaults to `/tmp` if not specified. Only applicable when `ssh_conn_id` is provided.
+     :param bteq_script_encoding: Character encoding for the BTEQ script file. Defaults to ASCII if not specified.
+     :param timeout: Timeout (in seconds) for executing the BTEQ command. Defaults to 600 seconds (10 minutes).
+     :param timeout_rc: Return code to use if the BTEQ execution fails due to a timeout. To allow DAG execution to continue after a timeout, include this value in `bteq_quit_rc`. If not specified, a timeout raises an exception and stops the DAG.
+     :param bteq_session_encoding: Character encoding for the BTEQ session. Defaults to UTF-8 if not specified.
+     :param bteq_quit_rc: Accepts a single integer, list, or tuple of return codes. Specifies which BTEQ return codes should be treated as successful, allowing subsequent tasks to continue execution.
+     """
+
+     def __init__(self, teradata_conn_id: str, ssh_conn_id: str | None = None, *args, **kwargs):
+         super().__init__(teradata_conn_id, *args, **kwargs)
+         self.ssh_conn_id = ssh_conn_id
+         self.ssh_hook = SSHHook(ssh_conn_id=ssh_conn_id) if ssh_conn_id else None
+
+     def execute_bteq_script(
+         self,
+         bteq_script: str,
+         remote_working_dir: str | None,
+         bteq_script_encoding: str | None,
+         timeout: int,
+         timeout_rc: int | None,
+         bteq_session_encoding: str | None,
+         bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
+         temp_file_read_encoding: str | None,
+     ) -> int | None:
+         """Execute the BTEQ script either on the local machine or on a remote host, depending on whether ``ssh_conn_id`` is set."""
+         # Remote execution: the script is written to a local temp file and encrypted before transfer.
+         if self.ssh_hook:
+             return self.execute_bteq_script_at_remote(
+                 bteq_script,
+                 remote_working_dir,
+                 bteq_script_encoding,
+                 timeout,
+                 timeout_rc,
+                 bteq_session_encoding,
+                 bteq_quit_rc,
+                 temp_file_read_encoding,
+             )
+         return self.execute_bteq_script_at_local(
+             bteq_script,
+             bteq_script_encoding,
+             timeout,
+             timeout_rc,
+             bteq_quit_rc,
+             bteq_session_encoding,
+             temp_file_read_encoding,
+         )
+
+     def execute_bteq_script_at_remote(
+         self,
+         bteq_script: str,
+         remote_working_dir: str | None,
+         bteq_script_encoding: str | None,
+         timeout: int,
+         timeout_rc: int | None,
+         bteq_session_encoding: str | None,
+         bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
+         temp_file_read_encoding: str | None,
+     ) -> int | None:
+         with self.preferred_temp_directory() as tmp_dir:
+             file_path = os.path.join(tmp_dir, "bteq_script.txt")
+             with open(file_path, "w", encoding=str(temp_file_read_encoding or "UTF-8")) as f:
+                 f.write(bteq_script)
+             return self._transfer_to_and_execute_bteq_on_remote(
+                 file_path,
+                 remote_working_dir,
+                 bteq_script_encoding,
+                 timeout,
+                 timeout_rc,
+                 bteq_quit_rc,
+                 bteq_session_encoding,
+                 tmp_dir,
+             )
+
+     def _transfer_to_and_execute_bteq_on_remote(
+         self,
+         file_path: str,
+         remote_working_dir: str | None,
+         bteq_script_encoding: str | None,
+         timeout: int,
+         timeout_rc: int | None,
+         bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
+         bteq_session_encoding: str | None,
+         tmp_dir: str,
+     ) -> int | None:
+         encrypted_file_path = None
+         remote_encrypted_path = None
+         try:
+             if self.ssh_hook and self.ssh_hook.get_conn():
+                 with self.ssh_hook.get_conn() as ssh_client:
+                     if ssh_client is None:
+                         raise AirflowException("Failed to establish SSH connection. `ssh_client` is None.")
+                     verify_bteq_installed_remote(ssh_client)
+                     password = generate_random_password()  # Encryption/decryption password
+                     encrypted_file_path = os.path.join(tmp_dir, "bteq_script.enc")
+                     generate_encrypted_file_with_openssl(file_path, password, encrypted_file_path)
+                     if not remote_working_dir:
+                         remote_working_dir = get_remote_tmp_dir(ssh_client)
+                     self.log.debug(
+                         "Transferring encrypted BTEQ script to remote host: %s", remote_working_dir
+                     )
+                     remote_encrypted_path = os.path.join(remote_working_dir or "", "bteq_script.enc")
+                     # Normalize separators only for Windows remotes; POSIX SFTP paths use forward slashes.
+                     if "windows" in identify_os(ssh_client):
+                         remote_encrypted_path = remote_encrypted_path.replace("/", "\\")
+
+                     transfer_file_sftp(ssh_client, encrypted_file_path, remote_encrypted_path)
+
+                     bteq_command_str = prepare_bteq_command_for_remote_execution(
+                         timeout=timeout,
+                         bteq_script_encoding=bteq_script_encoding or "",
+                         bteq_session_encoding=bteq_session_encoding or "",
+                         timeout_rc=timeout_rc or -1,
+                     )
+
+                     exit_status, stdout, stderr = decrypt_remote_file_to_string(
+                         ssh_client,
+                         remote_encrypted_path,
+                         password,
+                         bteq_command_str,
+                     )
+
+                     failure_message = None
+                     password = None  # Clear sensitive data
+
+                     if "Failure" in stderr or "Error" in stderr:
+                         failure_message = stderr
+                     # Raise an exception only when BTEQ reported a failure and the exit status is not
+                     # one of the accepted quit return codes; otherwise log the message as a warning
+                     # so the task does not fail.
+                     if (
+                         failure_message
+                         and exit_status != 0
+                         and exit_status
+                         not in (
+                             bteq_quit_rc
+                             if isinstance(bteq_quit_rc, (list, tuple))
+                             else [bteq_quit_rc if bteq_quit_rc is not None else 0]
+                         )
+                     ):
+                         raise AirflowException(f"BTEQ task failed with error: {failure_message}")
+                     if failure_message:
+                         self.log.warning(failure_message)
+                     return exit_status
+             else:
+                 raise AirflowException("SSH connection is not established. `ssh_hook` is None or invalid.")
+         except (OSError, socket.gaierror):
+             raise AirflowException(
+                 "SSH connection timed out. Please check the network or server availability."
+             )
+         except SSHException as e:
+             raise AirflowException(f"An unexpected error occurred during SSH connection: {str(e)}")
+         except AirflowException as e:
+             raise e
+         except Exception as e:
+             raise AirflowException(
+                 f"An unexpected error occurred while executing BTEQ script on remote machine: {str(e)}"
+             )
+         finally:
+             # Remove the local encrypted script file
+             if encrypted_file_path and os.path.exists(encrypted_file_path):
+                 os.remove(encrypted_file_path)
+             # Cleanup: delete the remote temporary file
+             if remote_encrypted_path:
+                 if self.ssh_hook and self.ssh_hook.get_conn():
+                     with self.ssh_hook.get_conn() as ssh_client:
+                         if ssh_client is None:
+                             raise AirflowException(
+                                 "Failed to establish SSH connection. `ssh_client` is None."
+                             )
+                         # Detect OS to pick the right delete command
+                         os_info = identify_os(ssh_client)
+                         if "windows" in os_info:
+                             cleanup_en_command = f'del /f /q "{remote_encrypted_path}"'
+                         else:
+                             cleanup_en_command = f"rm -f '{remote_encrypted_path}'"
+                         self.log.debug("Cleaning up remote file: %s", cleanup_en_command)
+                         ssh_client.exec_command(cleanup_en_command)
+
+     def execute_bteq_script_at_local(
+         self,
+         bteq_script: str,
+         bteq_script_encoding: str | None,
+         timeout: int,
+         timeout_rc: int | None,
+         bteq_quit_rc: int | list[int] | tuple[int, ...] | None,
+         bteq_session_encoding: str | None,
+         temp_file_read_encoding: str | None,
+     ) -> int | None:
+         verify_bteq_installed()
+         bteq_command_str = prepare_bteq_command_for_local_execution(
+             self.get_conn(),
+             timeout=timeout,
+             bteq_script_encoding=bteq_script_encoding or "",
+             bteq_session_encoding=bteq_session_encoding or "",
+             timeout_rc=timeout_rc or -1,
+         )
+         process = subprocess.Popen(
+             bteq_command_str,
+             stdin=subprocess.PIPE,
+             stdout=subprocess.PIPE,
+             stderr=subprocess.STDOUT,
+             shell=True,
+             preexec_fn=os.setsid,
+         )
+         encoded_bteq_script = bteq_script.encode(str(temp_file_read_encoding or "UTF-8"))
+         stdout_data, _ = process.communicate(input=encoded_bteq_script)
+         try:
+             # https://docs.python.org/3.10/library/subprocess.html#subprocess.Popen.wait timeout is in seconds
+             process.wait(timeout=timeout + 60)  # One extra minute on top of the BTEQ script timeout
+         except subprocess.TimeoutExpired:
+             self.on_kill()
+             raise AirflowException(f"BTEQ command timed out after {timeout} seconds.")
+         conn = self.get_conn()
+         conn["sp"] = process  # For `on_kill` support
+         failure_message = None
+         if stdout_data is None:
+             raise AirflowException("Process stdout is None. Unable to read BTEQ output.")
+         decoded_line = ""
+         for line in stdout_data.splitlines():
+             try:
+                 decoded_line = line.decode("UTF-8").strip()
+             except UnicodeDecodeError:
+                 self.log.warning("Failed to decode line: %s", line)
+             if "Failure" in decoded_line or "Error" in decoded_line:
+                 failure_message = decoded_line
+         # Raise an exception only when BTEQ reported a failure and the return code is not
+         # one of the accepted quit return codes; otherwise log the message as a warning
+         # so the task does not fail.
+         if (
+             failure_message
+             and process.returncode != 0
+             and process.returncode
+             not in (
+                 bteq_quit_rc
+                 if isinstance(bteq_quit_rc, (list, tuple))
+                 else [bteq_quit_rc if bteq_quit_rc is not None else 0]
+             )
+         ):
+             raise AirflowException(f"BTEQ task failed with error: {failure_message}")
+         if failure_message:
+             self.log.warning(failure_message)
+
+         return process.returncode
+
+     def on_kill(self):
+         """Terminate the subprocess if running."""
+         conn = self.get_conn()
+         process = conn.get("sp")
+         if process:
+             try:
+                 process.terminate()
+                 process.wait(timeout=5)
+             except subprocess.TimeoutExpired:
+                 self.log.warning("Subprocess did not terminate in time. Forcing kill...")
+                 process.kill()
+             except Exception as e:
+                 self.log.error("Failed to terminate subprocess: %s", str(e))
+
+     def get_airflow_home_dir(self) -> str:
+         """Get the AIRFLOW_HOME directory, expanding a leading ``~`` if present."""
+         return os.path.expanduser(os.environ.get("AIRFLOW_HOME", "~/airflow"))
+
+     @contextmanager
+     def preferred_temp_directory(self, prefix="bteq_"):
+         try:
+             temp_dir = tempfile.gettempdir()
+             if not os.path.isdir(temp_dir) or not os.access(temp_dir, os.W_OK):
+                 raise OSError("OS temp dir not usable")
+         except Exception:
+             temp_dir = self.get_airflow_home_dir()
+
+         with tempfile.TemporaryDirectory(dir=temp_dir, prefix=prefix) as tmp:
+             yield tmp
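
For orientation, here is a minimal sketch of driving the new hook directly; the connection ID `teradata_default` is a placeholder, and in practice the hook is normally invoked through the `BteqOperator` further below:

    from airflow.providers.teradata.hooks.bteq import BteqHook

    # Assumes an Airflow connection "teradata_default" with host/login/password set
    # and a local `bteq` binary on PATH.
    hook = BteqHook(teradata_conn_id="teradata_default")

    # A return code that is non-zero and not listed in bteq_quit_rc raises AirflowException.
    rc = hook.execute_bteq_script(
        bteq_script="SELECT DATE;",
        remote_working_dir=None,        # only used for SSH execution
        bteq_script_encoding=None,      # defaults to ASCII
        timeout=600,
        timeout_rc=None,
        bteq_session_encoding=None,
        bteq_quit_rc=(0, 4),            # treat BTEQ warnings (rc 4) as success
        temp_file_read_encoding="UTF-8",
    )
    print(rc)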
@@ -29,7 +29,10 @@ from teradatasql import TeradataConnection
 from airflow.providers.common.sql.hooks.sql import DbApiHook
 
 if TYPE_CHECKING:
-     from airflow.models.connection import Connection
+     try:
+         from airflow.sdk import Connection
+     except ImportError:
+         from airflow.models.connection import Connection  # type: ignore[assignment]
 
 PARAM_TYPES = {bool, float, int, str}
 
@@ -176,7 +179,7 @@ class TeradataHook(DbApiHook):
 
         if conn.extra_dejson.get("sslmode", False):
             conn_config["sslmode"] = conn.extra_dejson["sslmode"]
-             if "verify" in conn_config["sslmode"]:
+             if "verify" in str(conn_config["sslmode"]):
                 if conn.extra_dejson.get("sslca", False):
                     conn_config["sslca"] = conn.extra_dejson["sslca"]
                 if conn.extra_dejson.get("sslcapath", False):
@@ -0,0 +1,97 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ import subprocess
+ from abc import ABC
+ from typing import Any
+
+ from airflow.exceptions import AirflowException
+
+ try:
+     from airflow.sdk import BaseHook
+ except ImportError:
+     from airflow.hooks.base import BaseHook  # type: ignore[attr-defined,no-redef]
+
+
+ class TtuHook(BaseHook, ABC):
+     """
+     Abstract base hook for integrating Teradata Tools and Utilities (TTU) with Airflow.
+
+     This hook provides common connection handling, resource management, and lifecycle
+     support for TTU-based operations such as BTEQ, TLOAD, and TPT.
+
+     It should not be used directly. Instead, it must be subclassed by concrete hooks
+     like `BteqHook`, `TloadHook`, or `TddlHook` that implement the actual TTU command logic.
+
+     Core features:
+     - Establishes a reusable Teradata connection configuration.
+     - Provides context management for safe resource cleanup.
+     - Manages subprocess termination (e.g., for long-running TTU jobs).
+
+     Requirements:
+     - TTU command-line tools must be installed and accessible via PATH.
+     - A valid Airflow connection with Teradata credentials must be configured.
+     """
+
+     def __init__(self, teradata_conn_id: str = "teradata_default", *args, **kwargs) -> None:
+         super().__init__()
+         self.teradata_conn_id = teradata_conn_id
+         self.conn: dict[str, Any] | None = None
+
+     def __enter__(self):
+         return self
+
+     def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
+         if self.conn is not None:
+             self.close_conn()
+
+     def get_conn(self) -> dict[str, Any]:
+         """
+         Set up and return a Teradata connection dictionary.
+
+         The dictionary includes connection credentials and a subprocess placeholder.
+         The connection is created only once per hook instance.
+
+         :return: Dictionary with connection details.
+         """
+         if not self.conn:
+             connection = self.get_connection(self.teradata_conn_id)
+             if not connection.login or not connection.password or not connection.host:
+                 raise AirflowException("Missing required connection parameters: login, password, or host.")
+
+             self.conn = dict(
+                 login=connection.login,
+                 password=connection.password,
+                 host=connection.host,
+                 database=connection.schema,
+                 sp=None,  # Subprocess placeholder
+             )
+         return self.conn
+
+     def close_conn(self):
+         """Terminate any active TTU subprocess and clear the connection."""
+         if self.conn:
+             if self.conn.get("sp") and self.conn["sp"].poll() is None:
+                 self.conn["sp"].terminate()
+                 try:
+                     self.conn["sp"].wait(timeout=5)
+                 except subprocess.TimeoutExpired:
+                     self.log.warning("Subprocess did not terminate in time. Forcing kill...")
+                     self.conn["sp"].kill()
+             self.conn = None
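
Since `TtuHook` is abstract, a concrete subclass supplies the actual TTU invocation. A minimal sketch under stated assumptions: the `FastExportHook` name is invented for illustration, and `fexp` is assumed to be the FastExport binary on PATH:

    import subprocess

    from airflow.providers.teradata.hooks.ttu import TtuHook


    class FastExportHook(TtuHook):  # hypothetical subclass, for illustration only
        def run(self, script: str) -> int:
            conn = self.get_conn()  # dict with login/password/host/database/sp
            # Keep the subprocess handle in conn["sp"] so close_conn()/__exit__
            # can terminate it if the task is killed.
            conn["sp"] = subprocess.Popen(["fexp"], stdin=subprocess.PIPE)
            conn["sp"].communicate(input=script.encode("UTF-8"))
            return conn["sp"].returncode


    # Context-manager usage guarantees close_conn() runs:
    with FastExportHook(teradata_conn_id="teradata_default") as hook:
        hook.run(".LOGTABLE mydb.fexp_log;")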
@@ -0,0 +1,282 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING
+
+ from airflow.providers.teradata.utils.bteq_util import (
+     is_valid_encoding,
+     is_valid_file,
+     is_valid_remote_bteq_script_file,
+     prepare_bteq_script_for_local_execution,
+     prepare_bteq_script_for_remote_execution,
+     read_file,
+ )
+
+ if TYPE_CHECKING:
+     from paramiko import SSHClient
+
+     try:
+         from airflow.sdk.definitions.context import Context
+     except ImportError:
+         from airflow.utils.context import Context
+
+ from airflow.providers.ssh.hooks.ssh import SSHHook
+ from airflow.providers.teradata.hooks.bteq import BteqHook
+ from airflow.providers.teradata.hooks.teradata import TeradataHook
+ from airflow.providers.teradata.version_compat import BaseOperator
+
+
+ def contains_template(parameter_value):
+     # Check whether the parameter contains Jinja templating syntax
+     return "{{" in parameter_value and "}}" in parameter_value
+
+
+ class BteqOperator(BaseOperator):
+     """
+     Teradata operator to execute SQL statements or BTEQ (Basic Teradata Query) scripts using the Teradata BTEQ utility.
+
+     Execution of BTEQ scripts is supported either locally or remotely via SSH.
+
+     BTEQ scripts are used to interact with Teradata databases, allowing users to perform
+     operations such as querying, data manipulation, and administrative tasks.
+
+     Features:
+     - Supports both local and remote execution of BTEQ scripts.
+     - Handles connection details, script preparation, and execution.
+     - Provides robust error handling and logging for debugging.
+     - Allows configuration of session parameters such as session and BTEQ I/O encoding.
+
+     .. seealso::
+         For more information on how to use this operator, take a look at the guide:
+         :ref:`howto/operator:BteqOperator`
+
+     :param sql: SQL statement(s) to be executed using BTEQ. (templated)
+     :param file_path: Optional path to an existing SQL or BTEQ script file. If provided, this file is used instead of the `sql` content. The path refers to a remote file when executing remotely via SSH, or a local file when executing locally.
+     :param teradata_conn_id: Reference to a specific Teradata connection.
+     :param ssh_conn_id: Optional SSH connection ID for remote execution. Used only when executing scripts remotely.
+     :param remote_working_dir: Temporary directory on the remote host (via SSH) to which the BTEQ script is transferred and where it is executed. Defaults to `/tmp` if not specified. Only applicable when `ssh_conn_id` is provided.
+     :param bteq_session_encoding: Character set encoding for the BTEQ session. Defaults to ASCII if not specified.
+     :param bteq_script_encoding: Character encoding for the BTEQ script file. Defaults to ASCII if not specified.
+     :param bteq_quit_rc: Accepts a single integer, list, or tuple of return codes. Specifies which BTEQ return codes should be treated as successful, allowing subsequent tasks to continue execution.
+     :param timeout: Timeout (in seconds) for executing the BTEQ command. Defaults to 600 seconds (10 minutes).
+     :param timeout_rc: Return code to use if the BTEQ execution fails due to a timeout. To allow DAG execution to continue after a timeout, include this value in `bteq_quit_rc`. If not specified, a timeout raises an exception and stops the DAG.
+     """
+
+     template_fields = ("sql",)
+     ui_color = "#ff976d"
+
+     def __init__(
+         self,
+         *,
+         sql: str | None = None,
+         file_path: str | None = None,
+         teradata_conn_id: str = TeradataHook.default_conn_name,
+         ssh_conn_id: str | None = None,
+         remote_working_dir: str | None = None,
+         bteq_session_encoding: str | None = None,
+         bteq_script_encoding: str | None = None,
+         bteq_quit_rc: int | list[int] | tuple[int, ...] | None = None,
+         timeout: int = 600,  # Default to 10 minutes
+         timeout_rc: int | None = None,
+         **kwargs,
+     ) -> None:
+         super().__init__(**kwargs)
+         self.sql = sql
+         self.file_path = file_path
+         self.teradata_conn_id = teradata_conn_id
+         self.ssh_conn_id = ssh_conn_id
+         self.remote_working_dir = remote_working_dir
+         self.timeout = timeout
+         self.timeout_rc = timeout_rc
+         self.bteq_session_encoding = bteq_session_encoding
+         self.bteq_script_encoding = bteq_script_encoding
+         self.bteq_quit_rc = bteq_quit_rc
+         self._hook: BteqHook | None = None
+         self._ssh_hook: SSHHook | None = None
+         self.temp_file_read_encoding = "UTF-8"
+
+     def execute(self, context: Context) -> int | None:
+         """Execute BTEQ code using the BteqHook."""
+         if not self.sql and not self.file_path:
+             raise ValueError(
+                 "BteqOperator requires either the 'sql' or 'file_path' parameter. Both are missing."
+             )
+         self._hook = BteqHook(teradata_conn_id=self.teradata_conn_id, ssh_conn_id=self.ssh_conn_id)
+         self._ssh_hook = SSHHook(ssh_conn_id=self.ssh_conn_id) if self.ssh_conn_id else None
+
+         # Validate and set BTEQ session and script encoding
+         if not self.bteq_session_encoding or self.bteq_session_encoding == "ASCII":
+             self.bteq_session_encoding = ""
+             if self.bteq_script_encoding == "UTF8":
+                 self.temp_file_read_encoding = "UTF-8"
+             elif self.bteq_script_encoding == "UTF16":
+                 self.temp_file_read_encoding = "UTF-16"
+             self.bteq_script_encoding = ""
+         elif self.bteq_session_encoding == "UTF8" and (
+             not self.bteq_script_encoding or self.bteq_script_encoding == "ASCII"
+         ):
+             self.bteq_script_encoding = "UTF8"
+         elif self.bteq_session_encoding == "UTF16":
+             if not self.bteq_script_encoding or self.bteq_script_encoding == "ASCII":
+                 self.bteq_script_encoding = "UTF8"
+         # For file reading in Python: map the BTEQ encoding to the Python encoding name
+         if self.bteq_script_encoding == "UTF8":
+             self.temp_file_read_encoding = "UTF-8"
+         elif self.bteq_script_encoding == "UTF16":
+             self.temp_file_read_encoding = "UTF-16"
+
+         # Local execution
+         if not self._ssh_hook:
+             if self.sql:
+                 bteq_script = prepare_bteq_script_for_local_execution(
+                     sql=self.sql,
+                 )
+                 return self._hook.execute_bteq_script(
+                     bteq_script,
+                     self.remote_working_dir,
+                     self.bteq_script_encoding,
+                     self.timeout,
+                     self.timeout_rc,
+                     self.bteq_session_encoding,
+                     self.bteq_quit_rc,
+                     self.temp_file_read_encoding,
+                 )
+             if self.file_path:
+                 if not is_valid_file(self.file_path):
+                     raise ValueError(
+                         f"The provided file path '{self.file_path}' is invalid or does not exist."
+                     )
+                 try:
+                     is_valid_encoding(self.file_path, self.temp_file_read_encoding or "UTF-8")
+                 except UnicodeDecodeError as e:
+                     errmsg = f"The provided file '{self.file_path}' has an encoding different from the BTEQ I/O encoding 'UTF-8'."
+                     if self.bteq_script_encoding:
+                         errmsg = f"The provided file '{self.file_path}' has an encoding different from the specified BTEQ I/O encoding '{self.bteq_script_encoding}'."
+                     raise ValueError(errmsg) from e
+                 return self._handle_local_bteq_file(
+                     file_path=self.file_path,
+                     context=context,
+                 )
+         # Remote execution
+         elif self._ssh_hook:
+             # When a SQL statement is provided through the `sql` parameter, prepare the BTEQ script
+             if self.sql:
+                 bteq_script = prepare_bteq_script_for_remote_execution(
+                     conn=self._hook.get_conn(),
+                     sql=self.sql,
+                 )
+                 return self._hook.execute_bteq_script(
+                     bteq_script,
+                     self.remote_working_dir,
+                     self.bteq_script_encoding,
+                     self.timeout,
+                     self.timeout_rc,
+                     self.bteq_session_encoding,
+                     self.bteq_quit_rc,
+                     self.temp_file_read_encoding,
+                 )
+             if self.file_path:
+                 with self._ssh_hook.get_conn() as ssh_client:
+                     # When a remote .sql or .bteq file path is provided through `file_path`, execute it on the remote machine
+                     if self.file_path and is_valid_remote_bteq_script_file(ssh_client, self.file_path):
+                         return self._handle_remote_bteq_file(
+                             ssh_client=ssh_client,
+                             file_path=self.file_path,
+                             context=context,
+                         )
+                     raise ValueError(
+                         f"The provided remote file path '{self.file_path}' is invalid or the file does not exist on the remote machine at the given path."
+                     )
+         else:
+             raise ValueError(
+                 "BteqOperator requires either the 'sql' or 'file_path' parameter. Both are missing."
+             )
+         return None
+
+     def _handle_remote_bteq_file(
+         self,
+         ssh_client: SSHClient,
+         file_path: str | None,
+         context: Context,
+     ) -> int | None:
+         if file_path:
+             with ssh_client:
+                 sftp = ssh_client.open_sftp()
+                 try:
+                     with sftp.open(file_path, "r") as remote_file:
+                         original_content = remote_file.read().decode(self.temp_file_read_encoding or "UTF-8")
+                 finally:
+                     sftp.close()
+                 rendered_content = original_content
+                 if contains_template(original_content):
+                     rendered_content = self.render_template(original_content, context)
+                 if self._hook:
+                     bteq_script = prepare_bteq_script_for_remote_execution(
+                         conn=self._hook.get_conn(),
+                         sql=rendered_content,
+                     )
+                     return self._hook.execute_bteq_script_at_remote(
+                         bteq_script,
+                         self.remote_working_dir,
+                         self.bteq_script_encoding,
+                         self.timeout,
+                         self.timeout_rc,
+                         self.bteq_session_encoding,
+                         self.bteq_quit_rc,
+                         self.temp_file_read_encoding,
+                     )
+                 return None
+         raise ValueError(
+             "Please provide a valid file path for the BTEQ script to be executed on the remote machine."
+         )
+
+     def _handle_local_bteq_file(
+         self,
+         file_path: str,
+         context: Context,
+     ) -> int | None:
+         if file_path and is_valid_file(file_path):
+             file_content = read_file(file_path, encoding=str(self.temp_file_read_encoding or "UTF-8"))
+             # Manually render using the operator's context
+             rendered_content = file_content
+             if contains_template(file_content):
+                 rendered_content = self.render_template(file_content, context)
+             bteq_script = prepare_bteq_script_for_local_execution(
+                 sql=rendered_content,
+             )
+             if self._hook:
+                 result = self._hook.execute_bteq_script(
+                     bteq_script,
+                     self.remote_working_dir,
+                     self.bteq_script_encoding,
+                     self.timeout,
+                     self.timeout_rc,
+                     self.bteq_session_encoding,
+                     self.bteq_quit_rc,
+                     self.temp_file_read_encoding,
+                 )
+                 return result
+         return None
+
+     def on_kill(self) -> None:
+         """Handle task termination by invoking the on_kill method of BteqHook."""
+         if self._hook:
+             self._hook.on_kill()
+         else:
+             self.log.warning("BteqHook was not initialized. Nothing to terminate.")
@@ -20,9 +20,9 @@ from __future__ import annotations
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, ClassVar
 
- from airflow.models import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
+ from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     try:
@@ -23,9 +23,9 @@ from enum import Enum
 from functools import cached_property
 from typing import TYPE_CHECKING
 
- from airflow.models import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
 from airflow.providers.teradata.utils.constants import Constants
+ from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     try:
@@ -21,8 +21,6 @@ from collections.abc import Sequence
 from textwrap import dedent
 from typing import TYPE_CHECKING
 
- from airflow.models import BaseOperator
-
 try:
     from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
 except ModuleNotFoundError as e:
@@ -31,6 +29,7 @@ except ModuleNotFoundError as e:
     raise AirflowOptionalProviderFeatureException(e)
 
 from airflow.providers.teradata.hooks.teradata import TeradataHook
+ from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
@@ -21,8 +21,6 @@ from collections.abc import Sequence
 from textwrap import dedent
 from typing import TYPE_CHECKING
 
- from airflow.models import BaseOperator
-
 try:
     from airflow.providers.amazon.aws.hooks.s3 import S3Hook
 except ModuleNotFoundError as e:
@@ -30,6 +28,7 @@ except ModuleNotFoundError as e:
 
     raise AirflowOptionalProviderFeatureException(e)
 from airflow.providers.teradata.hooks.teradata import TeradataHook
+ from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     try:
@@ -21,8 +21,8 @@ from collections.abc import Sequence
 from functools import cached_property
 from typing import TYPE_CHECKING
 
- from airflow.models import BaseOperator
 from airflow.providers.teradata.hooks.teradata import TeradataHook
+ from airflow.providers.teradata.version_compat import BaseOperator
 
 if TYPE_CHECKING:
     try:
@@ -0,0 +1,217 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ import os
+ import shutil
+ import stat
+ from typing import TYPE_CHECKING, Any
+
+ if TYPE_CHECKING:
+     from paramiko import SSHClient
+
+ from airflow.exceptions import AirflowException
+
+
+ def identify_os(ssh_client: SSHClient) -> str:
+     stdin, stdout, stderr = ssh_client.exec_command("uname || ver")
+     return stdout.read().decode().lower()
+
+
+ def verify_bteq_installed():
+     """Verify that BTEQ is installed and available on the system's PATH."""
+     if shutil.which("bteq") is None:
+         raise AirflowException("BTEQ is not installed or not available in the system's PATH.")
+
+
+ def verify_bteq_installed_remote(ssh_client: SSHClient):
+     """Verify that BTEQ is installed on the remote machine."""
+     # Detect OS
+     os_info = identify_os(ssh_client)
+
+     if "windows" in os_info:
+         check_cmd = "where bteq"
+     elif "darwin" in os_info:
+         # Check whether zsh exists first
+         stdin, stdout, stderr = ssh_client.exec_command("command -v zsh")
+         zsh_path = stdout.read().strip()
+         if zsh_path:
+             check_cmd = 'zsh -l -c "which bteq"'
+         else:
+             check_cmd = "which bteq"
+     else:
+         check_cmd = "which bteq"
+
+     stdin, stdout, stderr = ssh_client.exec_command(check_cmd)
+     exit_status = stdout.channel.recv_exit_status()
+     output = stdout.read().strip()
+     error = stderr.read().strip()
+
+     if exit_status != 0 or not output:
+         raise AirflowException(
+             f"BTEQ is not installed or not available in PATH. stderr: {error.decode() if error else 'N/A'}"
+         )
+
+
+ def transfer_file_sftp(ssh_client, local_path, remote_path):
+     sftp = ssh_client.open_sftp()
+     sftp.put(local_path, remote_path)
+     sftp.close()
+
+
+ def get_remote_tmp_dir(ssh_client):
+     os_info = identify_os(ssh_client)
+
+     if "windows" in os_info:
+         # Try getting the Windows temp dir
+         stdin, stdout, stderr = ssh_client.exec_command("echo %TEMP%")
+         tmp_dir = stdout.read().decode().strip()
+         if not tmp_dir:
+             tmp_dir = "C:\\Temp"
+     else:
+         tmp_dir = "/tmp"
+     return tmp_dir
+
+
+ # Host details cannot be passed on the bteq command line when executing on a remote machine.
+ # Instead, the .LOGON command is written into the BTEQ script itself to avoid the risk of
+ # exposing sensitive information.
+ def prepare_bteq_script_for_remote_execution(conn: dict[str, Any], sql: str) -> str:
+     """Build a BTEQ script with the necessary connection and session commands."""
+     script_lines = []
+     host = conn["host"]
+     login = conn["login"]
+     password = conn["password"]
+     script_lines.append(f" .LOGON {host}/{login},{password}")
+     return _prepare_bteq_script(script_lines, sql)
+
+
+ def prepare_bteq_script_for_local_execution(
+     sql: str,
+ ) -> str:
+     """Build a BTEQ script with the necessary connection and session commands."""
+     script_lines: list[str] = []
+     return _prepare_bteq_script(script_lines, sql)
+
+
+ def _prepare_bteq_script(script_lines: list[str], sql: str) -> str:
+     script_lines.append(sql.strip())
+     script_lines.append(".EXIT")
+     return "\n".join(script_lines)
+
+
+ def _prepare_bteq_command(
+     timeout: int,
+     bteq_script_encoding: str,
+     bteq_session_encoding: str,
+     timeout_rc: int,
+ ) -> list[str]:
+     bteq_core_cmd = ["bteq"]
+     if bteq_session_encoding:
+         bteq_core_cmd.append(f" -e {bteq_script_encoding}")
+         bteq_core_cmd.append(f" -c {bteq_session_encoding}")
+     bteq_core_cmd.append('"')
+     bteq_core_cmd.append(f".SET EXITONDELAY ON MAXREQTIME {timeout}")
+     if timeout_rc is not None and timeout_rc >= 0:
+         bteq_core_cmd.append(f" RC {timeout_rc}")
+     bteq_core_cmd.append(";")
+     # Airflow shows the BTEQ script only in the logs, not in the UI, so a WIDTH of 500 is enough
+     bteq_core_cmd.append(" .SET WIDTH 500;")
+     return bteq_core_cmd
+
+
+ def prepare_bteq_command_for_remote_execution(
+     timeout: int,
+     bteq_script_encoding: str,
+     bteq_session_encoding: str,
+     timeout_rc: int,
+ ) -> str:
+     """Prepare the BTEQ command with the necessary parameters."""
+     bteq_core_cmd = _prepare_bteq_command(timeout, bteq_script_encoding, bteq_session_encoding, timeout_rc)
+     bteq_core_cmd.append('"')
+     return " ".join(bteq_core_cmd)
+
+
+ def prepare_bteq_command_for_local_execution(
+     conn: dict[str, Any],
+     timeout: int,
+     bteq_script_encoding: str,
+     bteq_session_encoding: str,
+     timeout_rc: int,
+ ) -> str:
+     """Prepare the BTEQ command with the necessary parameters."""
+     bteq_core_cmd = _prepare_bteq_command(timeout, bteq_script_encoding, bteq_session_encoding, timeout_rc)
+     host = conn["host"]
+     login = conn["login"]
+     password = conn["password"]
+     bteq_core_cmd.append(f" .LOGON {host}/{login},{password}")
+     bteq_core_cmd.append('"')
+     bteq_command_str = " ".join(bteq_core_cmd)
+     return bteq_command_str
+
+
+ def is_valid_file(file_path: str) -> bool:
+     return os.path.isfile(file_path)
+
+
+ def is_valid_encoding(file_path: str, encoding: str = "UTF-8") -> bool:
+     """
+     Check whether the file can be read with the specified encoding.
+
+     :param file_path: Path to the file to be checked.
+     :param encoding: Encoding to use for reading the file.
+     :return: True if the file can be read with the specified encoding; raises UnicodeDecodeError otherwise.
+     """
+     with open(file_path, encoding=encoding) as f:
+         f.read()
+     return True
+
+
+ def read_file(file_path: str, encoding: str = "UTF-8") -> str:
+     """
+     Read the content of a file with the specified encoding.
+
+     :param file_path: Path to the file to be read.
+     :param encoding: Encoding to use for reading the file.
+     :return: Content of the file as a string.
+     """
+     if not os.path.isfile(file_path):
+         raise FileNotFoundError(f"The file {file_path} does not exist.")
+
+     with open(file_path, encoding=encoding) as f:
+         return f.read()
+
+
+ def is_valid_remote_bteq_script_file(ssh_client: SSHClient, remote_file_path: str, logger=None) -> bool:
+     """Check whether the given remote file path points to a valid BTEQ script file."""
+     if remote_file_path:
+         sftp_client = ssh_client.open_sftp()
+         try:
+             # Get file metadata
+             file_stat = sftp_client.stat(remote_file_path)
+             if file_stat.st_mode:
+                 is_regular_file = stat.S_ISREG(file_stat.st_mode)
+                 return is_regular_file
+             return False
+         except FileNotFoundError:
+             if logger:
+                 logger.error("File does not exist on remote at: %s", remote_file_path)
+             return False
+         finally:
+             sftp_client.close()
+     else:
+         return False
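
To make the command assembly concrete, this sketch shows what `prepare_bteq_command_for_remote_execution` produces for one set of inputs; the expected string in the comment is derived by tracing the code above:

    from airflow.providers.teradata.utils.bteq_util import prepare_bteq_command_for_remote_execution

    cmd = prepare_bteq_command_for_remote_execution(
        timeout=600,
        bteq_script_encoding="UTF8",
        bteq_session_encoding="UTF8",
        timeout_rc=8,
    )
    # Following the assembly above, cmd is (double spaces come from the joined fragments):
    #   bteq  -e UTF8  -c UTF8 " .SET EXITONDELAY ON MAXREQTIME 600  RC 8 ;  .SET WIDTH 500; "
    print(cmd)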
@@ -0,0 +1,75 @@
+ #
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ from __future__ import annotations
+
+ import secrets
+ import string
+ import subprocess
+
+
+ def generate_random_password(length=12):
+     # Character set: letters, digits, and special characters
+     characters = string.ascii_letters + string.digits + string.punctuation
+     # Generate a random password
+     password = "".join(secrets.choice(characters) for _ in range(length))
+     return password
+
+
+ def generate_encrypted_file_with_openssl(file_path: str, password: str, out_file: str):
+     # Run openssl enc with AES-256-CBC, PBKDF2, and salt
+     cmd = [
+         "openssl",
+         "enc",
+         "-aes-256-cbc",
+         "-salt",
+         "-pbkdf2",
+         "-pass",
+         f"pass:{password}",
+         "-in",
+         file_path,
+         "-out",
+         out_file,
+     ]
+     subprocess.run(cmd, check=True)
+
+
+ def decrypt_remote_file_to_string(ssh_client, remote_enc_file, password, bteq_command_str):
+     # Run the openssl decrypt command on the remote machine and pipe the output into BTEQ
+     quoted_password = shell_quote_single(password)
+
+     decrypt_cmd = (
+         f"openssl enc -d -aes-256-cbc -salt -pbkdf2 -pass pass:{quoted_password} -in {remote_enc_file} | "
+         + bteq_command_str
+     )
+     # Clear password references to avoid lingering sensitive data
+     password = None
+     quoted_password = None
+     stdin, stdout, stderr = ssh_client.exec_command(decrypt_cmd)
+     # Wait for the command to finish
+     exit_status = stdout.channel.recv_exit_status()
+     output = stdout.read().decode()
+     err = stderr.read().decode()
+     return exit_status, output, err
+
+
+ def shell_quote_single(s):
+     # Escape single quotes in s, then wrap in single quotes.
+     # In shell, to include a single quote inside single quotes: close, add '\'' and reopen.
+     return "'" + s.replace("'", "'\\''") + "'"
@@ -0,0 +1,42 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+ #
+ # NOTE! THIS FILE IS COPIED MANUALLY IN OTHER PROVIDERS DELIBERATELY TO AVOID ADDING UNNECESSARY
+ # DEPENDENCIES BETWEEN PROVIDERS. IF YOU WANT TO ADD CONDITIONAL CODE IN YOUR PROVIDER THAT DEPENDS
+ # ON AIRFLOW VERSION, PLEASE COPY THIS FILE TO THE ROOT PACKAGE OF YOUR PROVIDER AND IMPORT
+ # THOSE CONSTANTS FROM IT RATHER THAN IMPORTING THEM FROM ANOTHER PROVIDER OR TEST CODE
+ #
+ from __future__ import annotations
+
+
+ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
+     from packaging.version import Version
+
+     from airflow import __version__
+
+     airflow_version = Version(__version__)
+     return airflow_version.major, airflow_version.minor, airflow_version.micro
+
+
+ AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.sdk import BaseOperator
+ else:
+     from airflow.models import BaseOperator  # type: ignore[no-redef]
+
+ __all__ = ["AIRFLOW_V_3_0_PLUS", "BaseOperator"]
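
Usage inside the provider is then a single unconditional import, as the operator hunks above demonstrate; a sketch with a hypothetical operator name:

    # In any module of this provider, instead of `from airflow.models import BaseOperator`:
    from airflow.providers.teradata.version_compat import BaseOperator


    class MyTeradataOperator(BaseOperator):  # hypothetical operator, for illustration only
        def execute(self, context):
            self.log.info("Runs unchanged on both Airflow 2.x and 3.x")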
@@ -1,11 +1,11 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-teradata
- Version: 3.1.0rc1
+ Version: 3.2.0rc1
 Summary: Provider package apache-airflow-providers-teradata for Apache Airflow
 Keywords: airflow-provider,teradata,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: ~=3.10
 Description-Content-Type: text/x-rst
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
@@ -15,7 +15,6 @@ Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
@@ -26,15 +25,17 @@ Requires-Dist: teradatasqlalchemy>=17.20.0.0
 Requires-Dist: teradatasql>=17.20.0.28
 Requires-Dist: apache-airflow-providers-amazon ; extra == "amazon"
 Requires-Dist: apache-airflow-providers-microsoft-azure ; extra == "microsoft-azure"
+ Requires-Dist: apache-airflow-providers-ssh ; extra == "ssh"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-teradata/3.1.0/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-teradata/3.1.0
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-teradata/3.2.0/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-teradata/3.2.0
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
 Provides-Extra: amazon
 Provides-Extra: microsoft-azure
+ Provides-Extra: ssh
 
 
 .. Licensed to the Apache Software Foundation (ASF) under one
  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -61,7 +62,7 @@ Provides-Extra: microsoft-azure
61
62
 
62
63
  Package ``apache-airflow-providers-teradata``
63
64
 
64
- Release: ``3.1.0``
65
+ Release: ``3.2.0``
65
66
 
66
67
 
67
68
  `Teradata <https://www.teradata.com/>`__
@@ -74,7 +75,7 @@ This is a provider package for ``teradata`` provider. All classes for this provi
 are in ``airflow.providers.teradata`` python package.
 
 You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.1.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.0/>`_.
 
 Installation
 ------------
@@ -83,7 +84,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-teradata``
 
- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12
 
 Requirements
 ------------
@@ -116,8 +117,9 @@ Dependent package
 `apache-airflow-providers-amazon <https://airflow.apache.org/docs/apache-airflow-providers-amazon>`_ ``amazon``
 `apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_ ``common.sql``
 `apache-airflow-providers-microsoft-azure <https://airflow.apache.org/docs/apache-airflow-providers-microsoft-azure>`_ ``microsoft.azure``
+ `apache-airflow-providers-ssh <https://airflow.apache.org/docs/apache-airflow-providers-ssh>`_ ``ssh``
 ====================================================================================================================== ===================
 
 The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.1.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-teradata/3.2.0/changelog.html>`_.
 
@@ -0,0 +1,26 @@
+ airflow/providers/teradata/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ airflow/providers/teradata/__init__.py,sha256=gsGQuyJbj0NqeMWHeK5du0fxg7L5EMOEvWtYIT8oW7w,1497
+ airflow/providers/teradata/get_provider_info.py,sha256=SF-3YIl3CCi6mN6b9EEqLkJNyuMIM35CvY_H91QdELw,4031
+ airflow/providers/teradata/version_compat.py,sha256=0VNFig0J24YH-32j11aTLlLRXdnHCmQHrNYkBXbOjx4,1727
+ airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+ airflow/providers/teradata/hooks/bteq.py,sha256=9N9z2NbouKbwYiikCbMkVAobvgpRvHgN4uzChQ5c8GU,15067
+ airflow/providers/teradata/hooks/teradata.py,sha256=fOt3ZriM-rB27d-7RWhoEBZJyZcNkHowrSh4hqiNNi8,10964
+ airflow/providers/teradata/hooks/ttu.py,sha256=6editw7RKhLC8RBLmeZ4r4ILhrib1tOjytLJykPec5g,3778
+ airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+ airflow/providers/teradata/operators/bteq.py,sha256=w7CysOA1ioVvTLhPsj_g3paCM8YH0Yh-wUJLKRJsLZk,13223
+ airflow/providers/teradata/operators/teradata.py,sha256=G8Vgu2g-lyX7lisdEZOafIFJEx-eeeM9bgNI4fDKnq0,3873
+ airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=asnuih4an1AuKa2rBfyOuDDhfENx3sjvQe5LsfObvI0,21961
+ airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/teradata/transfers/azure_blob_to_teradata.py,sha256=XnMsQSLS-kQP7aBRer3bLro8qgNohcJ3H63c2YIjYMA,5678
+ airflow/providers/teradata/transfers/s3_to_teradata.py,sha256=-YskshSjx-qVniuSIHIQc4qJmfpxvhqiiy2zvXDzewk,5700
+ airflow/providers/teradata/transfers/teradata_to_teradata.py,sha256=J6ibOly6vjzS4Vwf0oLOL5pILCoyI8q9wDwpVP9efV4,4045
+ airflow/providers/teradata/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/teradata/triggers/teradata_compute_cluster.py,sha256=hjMTnOpqlbByTtmNdJ9usK7hilEAz4tFXpJoENgFhyo,6987
+ airflow/providers/teradata/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+ airflow/providers/teradata/utils/bteq_util.py,sha256=GCIPc1PCIOC-YWwxgYHr-N1d7X8ZGPU5Lmax_JZY6rA,7360
+ airflow/providers/teradata/utils/constants.py,sha256=ro1FVNsAakal8_uX27aN0DTVO0T9FG4fv9HzBIY2I-w,2253
+ airflow/providers/teradata/utils/encryption_utils.py,sha256=ARGWmgBbvSq6_MQHfTevvfvHjaiBjQI62UXltDcJLJo,2578
+ apache_airflow_providers_teradata-3.2.0rc1.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
+ apache_airflow_providers_teradata-3.2.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+ apache_airflow_providers_teradata-3.2.0rc1.dist-info/METADATA,sha256=oPXxhROtkfGAKjX944w2UqLQER_8fIgJ30Fs9vAZv-s,5964
+ apache_airflow_providers_teradata-3.2.0rc1.dist-info/RECORD,,
@@ -1,20 +0,0 @@
- airflow/providers/teradata/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
- airflow/providers/teradata/__init__.py,sha256=paySx53D2F3C5LDr55f32gk1eHLAD8p6o3d2pk6QbhE,1497
- airflow/providers/teradata/get_provider_info.py,sha256=lkm3HMP6Yq8hPF0BeAD9uIA2IgJovTMFDR9NPT8pHyU,3718
- airflow/providers/teradata/hooks/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/teradata/hooks/teradata.py,sha256=v7jCwOTBlAMtnR5cAakkNn0WSYGELcEACmV8I7yxTW0,10851
- airflow/providers/teradata/operators/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
- airflow/providers/teradata/operators/teradata.py,sha256=chuojW__qKu2RdWQpKSF3QN0rbTWB9TsAece8cKj0AU,3846
- airflow/providers/teradata/operators/teradata_compute_cluster.py,sha256=4c_EGQYcwuuXO62u_rwmyyXJ9Ph2rjYPCdjhV_lxJMk,21934
- airflow/providers/teradata/transfers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/teradata/transfers/azure_blob_to_teradata.py,sha256=Kcm97lJXAkNSdQf9u3yYY9iq9JtjB_Sr96nHqJBLNlQ,5652
- airflow/providers/teradata/transfers/s3_to_teradata.py,sha256=kWqTg9Z2Sl16BWdJWO3OXrnZveVBrj5byVuZtggfBbY,5674
- airflow/providers/teradata/transfers/teradata_to_teradata.py,sha256=z3sNPxZB012VZeUcLC16EwkpIUeENu3isw7bzwlb_r8,4018
- airflow/providers/teradata/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/teradata/triggers/teradata_compute_cluster.py,sha256=hjMTnOpqlbByTtmNdJ9usK7hilEAz4tFXpJoENgFhyo,6987
- airflow/providers/teradata/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/teradata/utils/constants.py,sha256=ro1FVNsAakal8_uX27aN0DTVO0T9FG4fv9HzBIY2I-w,2253
- apache_airflow_providers_teradata-3.1.0rc1.dist-info/entry_points.txt,sha256=JbigXoUoKVSNWG-_-029FCCuehMOmAvuSnNGZ9Bz1Kc,104
- apache_airflow_providers_teradata-3.1.0rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
- apache_airflow_providers_teradata-3.1.0rc1.dist-info/METADATA,sha256=fA2OtWXWGzz3KU1w0BX0qRaPzjSMw403FXLjqmPwIT4,5808
- apache_airflow_providers_teradata-3.1.0rc1.dist-info/RECORD,,