apache-airflow-providers-teradata 3.2.3__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
29
29
 
30
30
  __all__ = ["__version__"]
31
31
 
32
- __version__ = "3.2.3"
32
+ __version__ = "3.4.0"
33
33
 
34
34
  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
35
- "2.10.0"
35
+ "2.11.0"
36
36
  ):
37
37
  raise RuntimeError(
38
- f"The package `apache-airflow-providers-teradata:{__version__}` needs Apache Airflow 2.10.0+"
38
+ f"The package `apache-airflow-providers-teradata:{__version__}` needs Apache Airflow 2.11.0+"
39
39
  )
@@ -50,6 +50,13 @@ def get_provider_info():
50
50
  "logo": "/docs/integration-logos/Teradata.png",
51
51
  "tags": ["software"],
52
52
  },
53
+ {
54
+ "integration-name": "Tpt",
55
+ "external-doc-url": "https://www.teradata.com/",
56
+ "how-to-guide": ["/docs/apache-airflow-providers-teradata/operators/tpt.rst"],
57
+ "logo": "/docs/integration-logos/Teradata.png",
58
+ "tags": ["software"],
59
+ },
53
60
  ],
54
61
  "operators": [
55
62
  {
@@ -60,11 +67,13 @@ def get_provider_info():
60
67
  ],
61
68
  },
62
69
  {"integration-name": "Bteq", "python-modules": ["airflow.providers.teradata.operators.bteq"]},
70
+ {"integration-name": "Tpt", "python-modules": ["airflow.providers.teradata.operators.tpt"]},
63
71
  ],
64
72
  "hooks": [
65
73
  {"integration-name": "Teradata", "python-modules": ["airflow.providers.teradata.hooks.teradata"]},
66
74
  {"integration-name": "Ttu", "python-modules": ["airflow.providers.teradata.hooks.ttu"]},
67
75
  {"integration-name": "Bteq", "python-modules": ["airflow.providers.teradata.hooks.bteq"]},
76
+ {"integration-name": "Tpt", "python-modules": ["airflow.providers.teradata.hooks.tpt"]},
68
77
  ],
69
78
  "transfers": [
70
79
  {
@@ -25,7 +25,7 @@ from contextlib import contextmanager
25
25
 
26
26
  from paramiko import SSHException
27
27
 
28
- from airflow.exceptions import AirflowException
28
+ from airflow.providers.common.compat.sdk import AirflowException
29
29
  from airflow.providers.ssh.hooks.ssh import SSHHook
30
30
  from airflow.providers.teradata.hooks.ttu import TtuHook
31
31
  from airflow.providers.teradata.utils.bteq_util import (
@@ -0,0 +1,499 @@
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+
20
+ import logging
21
+ import os
22
+ import shutil
23
+ import socket
24
+ import subprocess
25
+ import tempfile
26
+ import uuid
27
+ from collections.abc import Generator
28
+ from contextlib import contextmanager
29
+
30
+ from paramiko import SSHException
31
+
32
+ from airflow.providers.ssh.hooks.ssh import SSHHook
33
+ from airflow.providers.teradata.hooks.ttu import TtuHook
34
+ from airflow.providers.teradata.utils.encryption_utils import (
35
+ generate_encrypted_file_with_openssl,
36
+ generate_random_password,
37
+ )
38
+ from airflow.providers.teradata.utils.tpt_util import (
39
+ decrypt_remote_file,
40
+ execute_remote_command,
41
+ remote_secure_delete,
42
+ secure_delete,
43
+ set_local_file_permissions,
44
+ set_remote_file_permissions,
45
+ terminate_subprocess,
46
+ transfer_file_sftp,
47
+ verify_tpt_utility_on_remote_host,
48
+ write_file,
49
+ )
50
+
51
+
52
+ class TptHook(TtuHook):
53
+ """
54
+ Hook for executing Teradata Parallel Transporter (TPT) operations.
55
+
56
+ This hook provides methods to execute TPT operations both locally and remotely via SSH.
57
+ It supports DDL operations using the tbuild utility and data loading operations using tdload.
58
+ It extends the `TtuHook` and integrates with Airflow's SSHHook for remote execution.
59
+
60
+ The TPT operations are used to interact with Teradata databases for DDL operations
61
+ such as creating, altering, or dropping tables, and for high-performance
62
+ data loading.
63
+
64
+ Features:
65
+ - Supports both local and remote execution of TPT operations.
66
+ - Secure file encryption for remote transfers.
67
+ - Comprehensive error handling and logging.
68
+ - Resource cleanup and management.
69
+
70
+ .. seealso::
71
+ - :ref:`hook API connection <howto/connection:teradata>`
72
+
73
+ :param ssh_conn_id: SSH connection ID for remote execution. If None, executes locally.
74
+ """
75
+
76
+ def __init__(self, ssh_conn_id: str | None = None, *args, **kwargs) -> None:
77
+ super().__init__(*args, **kwargs)
78
+ self.ssh_conn_id = ssh_conn_id
79
+ self.ssh_hook = SSHHook(ssh_conn_id=ssh_conn_id) if ssh_conn_id else None
80
+
81
+ def execute_ddl(
82
+ self,
83
+ tpt_script: str | list[str],
84
+ remote_working_dir: str,
85
+ ) -> int:
86
+ """
87
+ Execute a DDL statement using TPT.
88
+
89
+ Args:
90
+ tpt_script: TPT script content as string or list of strings
91
+ remote_working_dir: Remote working directory for SSH execution
92
+
93
+ Returns:
94
+ Exit code from the TPT operation
95
+
96
+ Raises:
97
+ ValueError: If tpt_script is empty or invalid
98
+ RuntimeError: Non-zero tbuild exit status or unexpected execution failure
99
+ ConnectionError: SSH connection not established or fails
100
+ TimeoutError: SSH connection/network timeout
101
+ FileNotFoundError: tbuild binary not found in PATH
102
+ """
103
+ if not tpt_script:
104
+ raise ValueError("TPT script must not be empty.")
105
+
106
+ tpt_script_content = "\n".join(tpt_script) if isinstance(tpt_script, list) else tpt_script
107
+
108
+ # Validate script content
109
+ if not tpt_script_content.strip():
110
+ raise ValueError("TPT script content must not be empty after processing.")
111
+
112
+ if self.ssh_hook:
113
+ self.log.info("Executing DDL statements via SSH on remote host")
114
+ return self._execute_tbuild_via_ssh(tpt_script_content, remote_working_dir)
115
+ self.log.info("Executing DDL statements locally")
116
+ return self._execute_tbuild_locally(tpt_script_content)
117
+
118
+ def _execute_tbuild_via_ssh(
119
+ self,
120
+ tpt_script_content: str,
121
+ remote_working_dir: str,
122
+ ) -> int:
123
+ """Execute tbuild command via SSH."""
124
+ with self.preferred_temp_directory() as tmp_dir:
125
+ local_script_file = os.path.join(tmp_dir, f"tbuild_script_{uuid.uuid4().hex}.sql")
126
+ write_file(local_script_file, tpt_script_content)
127
+ encrypted_file_path = f"{local_script_file}.enc"
128
+ remote_encrypted_script_file = os.path.join(
129
+ remote_working_dir, os.path.basename(encrypted_file_path)
130
+ )
131
+ remote_script_file = os.path.join(remote_working_dir, os.path.basename(local_script_file))
132
+ job_name = f"tbuild_job_{uuid.uuid4().hex}"
133
+
134
+ try:
135
+ if not self.ssh_hook:
136
+ raise ConnectionError("SSH connection is not established. `ssh_hook` is None or invalid.")
137
+ with self.ssh_hook.get_conn() as ssh_client:
138
+ verify_tpt_utility_on_remote_host(ssh_client, "tbuild", logging.getLogger(__name__))
139
+ password = generate_random_password()
140
+ generate_encrypted_file_with_openssl(local_script_file, password, encrypted_file_path)
141
+ transfer_file_sftp(
142
+ ssh_client,
143
+ encrypted_file_path,
144
+ remote_encrypted_script_file,
145
+ logging.getLogger(__name__),
146
+ )
147
+ decrypt_remote_file(
148
+ ssh_client,
149
+ remote_encrypted_script_file,
150
+ remote_script_file,
151
+ password,
152
+ logging.getLogger(__name__),
153
+ )
154
+
155
+ set_remote_file_permissions(ssh_client, remote_script_file, logging.getLogger(__name__))
156
+
157
+ tbuild_cmd = ["tbuild", "-f", remote_script_file, job_name]
158
+ self.log.info("Executing tbuild command on remote server: %s", " ".join(tbuild_cmd))
159
+ exit_status, output, error = execute_remote_command(ssh_client, " ".join(tbuild_cmd))
160
+ self.log.info("tbuild command output:\n%s", output)
161
+ self.log.info("tbuild command exited with status %s", exit_status)
162
+
163
+ # Clean up remote files before checking exit status
164
+ remote_secure_delete(
165
+ ssh_client,
166
+ [remote_encrypted_script_file, remote_script_file],
167
+ logging.getLogger(__name__),
168
+ )
169
+
170
+ if exit_status != 0:
171
+ raise RuntimeError(f"tbuild command failed with exit code {exit_status}: {error}")
172
+
173
+ return exit_status
174
+ except ConnectionError:
175
+ # Re-raise ConnectionError as-is (don't convert to TimeoutError)
176
+ raise
177
+ except (OSError, socket.gaierror) as e:
178
+ self.log.error("SSH connection timed out: %s", str(e))
179
+ raise TimeoutError(
180
+ "SSH connection timed out. Please check the network or server availability."
181
+ ) from e
182
+ except SSHException as e:
183
+ raise ConnectionError(f"SSH error during connection: {str(e)}") from e
184
+ except RuntimeError:
185
+ raise
186
+ except Exception as e:
187
+ raise RuntimeError(
188
+ f"Unexpected error while executing tbuild script on remote machine: {str(e)}"
189
+ ) from e
190
+ finally:
191
+ # Clean up local files
192
+ secure_delete(encrypted_file_path, logging.getLogger(__name__))
193
+ secure_delete(local_script_file, logging.getLogger(__name__))
194
+
195
+ def _execute_tbuild_locally(
196
+ self,
197
+ tpt_script_content: str,
198
+ ) -> int:
199
+ """Execute tbuild command locally."""
200
+ with self.preferred_temp_directory() as tmp_dir:
201
+ local_script_file = os.path.join(tmp_dir, f"tbuild_script_{uuid.uuid4().hex}.sql")
202
+ write_file(local_script_file, tpt_script_content)
203
+ # Set file permission to read-only for the current user (no permissions for group/others)
204
+ set_local_file_permissions(local_script_file, logging.getLogger(__name__))
205
+
206
+ job_name = f"tbuild_job_{uuid.uuid4().hex}"
207
+ tbuild_cmd = ["tbuild", "-f", local_script_file, job_name]
208
+
209
+ if not shutil.which("tbuild"):
210
+ raise FileNotFoundError("tbuild binary not found in PATH.")
211
+
212
+ sp = None
213
+ try:
214
+ self.log.info("Executing tbuild command: %s", " ".join(tbuild_cmd))
215
+ sp = subprocess.Popen(
216
+ tbuild_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, start_new_session=True
217
+ )
218
+ error_lines = []
219
+ if sp.stdout is not None:
220
+ for line in iter(sp.stdout.readline, b""):
221
+ decoded_line = line.decode("UTF-8").strip()
222
+ self.log.info(decoded_line)
223
+ if "error" in decoded_line.lower():
224
+ error_lines.append(decoded_line)
225
+ sp.wait()
226
+ self.log.info("tbuild command exited with return code %s", sp.returncode)
227
+ if sp.returncode != 0:
228
+ error_msg = "\n".join(error_lines) if error_lines else "Unknown error"
229
+ raise RuntimeError(f"tbuild command failed with return code {sp.returncode}: {error_msg}")
230
+ return sp.returncode
231
+ except RuntimeError:
232
+ raise
233
+ except Exception as e:
234
+ self.log.error("Error executing tbuild command: %s", str(e))
235
+ raise RuntimeError(f"Error executing tbuild command: {str(e)}") from e
236
+ finally:
237
+ secure_delete(local_script_file, logging.getLogger(__name__))
238
+ terminate_subprocess(sp, logging.getLogger(__name__))
239
+
240
+ def execute_tdload(
241
+ self,
242
+ remote_working_dir: str,
243
+ job_var_content: str | None = None,
244
+ tdload_options: str | None = None,
245
+ tdload_job_name: str | None = None,
246
+ ) -> int:
247
+ """
248
+ Execute a tdload operation using the tdload command-line utility.
249
+
250
+ Args:
251
+ remote_working_dir: Remote working directory for SSH execution
252
+ job_var_content: Content of the job variable file
253
+ tdload_options: Additional command-line options for tdload
254
+ tdload_job_name: Name for the tdload job
255
+
256
+ Returns:
257
+ Exit code from the tdload operation
258
+
259
+ Raises:
260
+ RuntimeError: Non-zero tdload exit status or unexpected execution failure
261
+ ConnectionError: SSH connection not established or fails
262
+ TimeoutError: SSH connection/network timeout
263
+ FileNotFoundError: tdload binary not found in PATH
264
+ """
265
+ tdload_job_name = tdload_job_name or f"tdload_job_{uuid.uuid4().hex}"
266
+ if self.ssh_hook:
267
+ self.log.info("Executing tdload via SSH on remote host with job name: %s", tdload_job_name)
268
+ return self._execute_tdload_via_ssh(
269
+ remote_working_dir, job_var_content, tdload_options, tdload_job_name
270
+ )
271
+ self.log.info("Executing tdload locally with job name: %s", tdload_job_name)
272
+ return self._execute_tdload_locally(job_var_content, tdload_options, tdload_job_name)
273
+
274
+ def _execute_tdload_via_ssh(
275
+ self,
276
+ remote_working_dir: str,
277
+ job_var_content: str | None,
278
+ tdload_options: str | None,
279
+ tdload_job_name: str | None,
280
+ ) -> int:
281
+ """
282
+ Write job_var_content to a temporary file, then transfer and execute it on the remote host.
283
+
284
+ Args:
285
+ remote_working_dir: Remote working directory
286
+ job_var_content: Content for the job variable file
287
+ tdload_options: Additional tdload command options
288
+ tdload_job_name: Name for the tdload job
289
+
290
+ Returns:
291
+ Exit code from the tdload operation
292
+ """
293
+ with self.preferred_temp_directory() as tmp_dir:
294
+ local_job_var_file = os.path.join(tmp_dir, f"tdload_job_var_{uuid.uuid4().hex}.txt")
295
+ write_file(local_job_var_file, job_var_content or "")
296
+ return self._transfer_to_and_execute_tdload_on_remote(
297
+ local_job_var_file, remote_working_dir, tdload_options, tdload_job_name
298
+ )
299
+
300
+ def _transfer_to_and_execute_tdload_on_remote(
301
+ self,
302
+ local_job_var_file: str,
303
+ remote_working_dir: str,
304
+ tdload_options: str | None,
305
+ tdload_job_name: str | None,
306
+ ) -> int:
307
+ """Transfer job variable file to remote host and execute tdload command."""
308
+ encrypted_file_path = f"{local_job_var_file}.enc"
309
+ remote_encrypted_job_file = os.path.join(remote_working_dir, os.path.basename(encrypted_file_path))
310
+ remote_job_file = os.path.join(remote_working_dir, os.path.basename(local_job_var_file))
311
+
312
+ try:
313
+ if not self.ssh_hook:
314
+ raise ConnectionError("SSH connection is not established. `ssh_hook` is None or invalid.")
315
+ with self.ssh_hook.get_conn() as ssh_client:
316
+ verify_tpt_utility_on_remote_host(ssh_client, "tdload", logging.getLogger(__name__))
317
+ password = generate_random_password()
318
+ generate_encrypted_file_with_openssl(local_job_var_file, password, encrypted_file_path)
319
+ transfer_file_sftp(
320
+ ssh_client, encrypted_file_path, remote_encrypted_job_file, logging.getLogger(__name__)
321
+ )
322
+ decrypt_remote_file(
323
+ ssh_client,
324
+ remote_encrypted_job_file,
325
+ remote_job_file,
326
+ password,
327
+ logging.getLogger(__name__),
328
+ )
329
+
330
+ set_remote_file_permissions(ssh_client, remote_job_file, logging.getLogger(__name__))
331
+
332
+ # Build tdload command more robustly
333
+ tdload_cmd = self._build_tdload_command(remote_job_file, tdload_options, tdload_job_name)
334
+
335
+ self.log.info("Executing tdload command on remote server: %s", " ".join(tdload_cmd))
336
+ exit_status, output, error = execute_remote_command(ssh_client, " ".join(tdload_cmd))
337
+ self.log.info("tdload command output:\n%s", output)
338
+ self.log.info("tdload command exited with status %s", exit_status)
339
+
340
+ # Clean up remote files before checking exit status
341
+ remote_secure_delete(
342
+ ssh_client, [remote_encrypted_job_file, remote_job_file], logging.getLogger(__name__)
343
+ )
344
+
345
+ if exit_status != 0:
346
+ raise RuntimeError(f"tdload command failed with exit code {exit_status}: {error}")
347
+
348
+ return exit_status
349
+ except ConnectionError:
350
+ # Re-raise ConnectionError as-is (don't convert to TimeoutError)
351
+ raise
352
+ except (OSError, socket.gaierror) as e:
353
+ self.log.error("SSH connection timed out: %s", str(e))
354
+ raise TimeoutError(
355
+ "SSH connection timed out. Please check the network or server availability."
356
+ ) from e
357
+ except SSHException as e:
358
+ raise ConnectionError(f"SSH error during connection: {str(e)}") from e
359
+ except RuntimeError:
360
+ raise
361
+ except Exception as e:
362
+ raise RuntimeError(
363
+ f"Unexpected error while executing tdload script on remote machine: {str(e)}"
364
+ ) from e
365
+ finally:
366
+ # Clean up local files
367
+ secure_delete(encrypted_file_path, logging.getLogger(__name__))
368
+ secure_delete(local_job_var_file, logging.getLogger(__name__))
369
+
370
+ def _execute_tdload_locally(
371
+ self,
372
+ job_var_content: str | None,
373
+ tdload_options: str | None,
374
+ tdload_job_name: str | None,
375
+ ) -> int:
376
+ """
377
+ Execute tdload command locally.
378
+
379
+ Args:
380
+ job_var_content: Content for the job variable file
381
+ tdload_options: Additional tdload command options
382
+ tdload_job_name: Name for the tdload job
383
+
384
+ Returns:
385
+ Exit code from the tdload operation
386
+ """
387
+ with self.preferred_temp_directory() as tmp_dir:
388
+ local_job_var_file = os.path.join(tmp_dir, f"tdload_job_var_{uuid.uuid4().hex}.txt")
389
+ write_file(local_job_var_file, job_var_content or "")
390
+
391
+ # Set file permission to read-only for the current user (no permissions for group/others)
392
+ set_local_file_permissions(local_job_var_file, logging.getLogger(__name__))
393
+
394
+ # Log file permissions for debugging purposes
395
+ file_permissions = oct(os.stat(local_job_var_file).st_mode & 0o777)
396
+ self.log.debug("Local job variable file permissions: %s", file_permissions)
397
+
398
+ # Build tdload command
399
+ tdload_cmd = self._build_tdload_command(local_job_var_file, tdload_options, tdload_job_name)
400
+
401
+ if not shutil.which("tdload"):
402
+ raise FileNotFoundError("tdload binary not found in PATH.")
403
+
404
+ sp = None
405
+ try:
406
+ # Print a visual separator for clarity in logs
407
+ self.log.info("Executing tdload command: %s", " ".join(tdload_cmd))
408
+ sp = subprocess.Popen(
409
+ tdload_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, start_new_session=True
410
+ )
411
+ error_lines = []
412
+ if sp.stdout is not None:
413
+ for line in iter(sp.stdout.readline, b""):
414
+ decoded_line = line.decode("UTF-8").strip()
415
+ self.log.info(decoded_line)
416
+ if "error" in decoded_line.lower():
417
+ error_lines.append(decoded_line)
418
+ sp.wait()
419
+ self.log.info("tdload command exited with return code %s", sp.returncode)
420
+ if sp.returncode != 0:
421
+ error_msg = "\n".join(error_lines) if error_lines else ""
422
+ if error_msg:
423
+ raise RuntimeError(
424
+ f"tdload command failed with return code {sp.returncode}:\n{error_msg}"
425
+ )
426
+ raise RuntimeError(f"tdload command failed with return code {sp.returncode}")
427
+ return sp.returncode
428
+ except RuntimeError:
429
+ raise
430
+ except Exception as e:
431
+ self.log.error("Error executing tdload command: %s", str(e))
432
+ raise RuntimeError(f"Error executing tdload command: {str(e)}") from e
433
+ finally:
434
+ secure_delete(local_job_var_file, logging.getLogger(__name__))
435
+ terminate_subprocess(sp, logging.getLogger(__name__))
436
+
437
+ def _build_tdload_command(
438
+ self, job_var_file: str, tdload_options: str | None, tdload_job_name: str | None
439
+ ) -> list[str]:
440
+ """
441
+ Build the tdload command with proper option handling.
442
+
443
+ Args:
444
+ job_var_file: Path to the job variable file
445
+ tdload_options: Additional tdload options as a space-separated string
446
+ tdload_job_name: Name for the tdload job
447
+
448
+ Returns:
449
+ List of command arguments for tdload
450
+ """
451
+ tdload_cmd = ["tdload", "-j", job_var_file]
452
+
453
+ # Add tdload_options if provided, with proper handling of quoted options
454
+ if tdload_options:
455
+ # Split options while preserving quoted arguments
456
+ import shlex
457
+
458
+ try:
459
+ parsed_options = shlex.split(tdload_options)
460
+ tdload_cmd.extend(parsed_options)
461
+ except ValueError as e:
462
+ self.log.warning(
463
+ "Failed to parse tdload_options using shlex, falling back to simple split: %s", str(e)
464
+ )
465
+ # Fallback to simple split if shlex parsing fails
466
+ tdload_cmd.extend(tdload_options.split())
467
+
468
+ # Add job name if provided (and not empty)
469
+ if tdload_job_name:
470
+ tdload_cmd.append(tdload_job_name)
471
+
472
+ return tdload_cmd
473
+
474
+ def on_kill(self) -> None:
475
+ """
476
+ Handle cleanup when the task is killed.
477
+
478
+ This method is called when Airflow needs to terminate the hook,
479
+ typically during task cancellation or shutdown.
480
+ """
481
+ self.log.info("TPT Hook cleanup initiated")
482
+ # Note: SSH connections are managed by context managers and will be cleaned up automatically
483
+ # Subprocesses are handled by terminate_subprocess in the finally blocks
484
+ # This method is available for future enhancements if needed
485
+
486
+ @contextmanager
487
+ def preferred_temp_directory(self, prefix: str = "tpt_") -> Generator[str, None, None]:
488
+ try:
489
+ temp_dir = tempfile.gettempdir()
490
+ if not os.path.isdir(temp_dir) or not os.access(temp_dir, os.W_OK):
491
+ raise OSError("OS temp dir not usable")
492
+ except Exception:
493
+ temp_dir = self.get_airflow_home_dir()
494
+ with tempfile.TemporaryDirectory(dir=temp_dir, prefix=prefix) as tmp:
495
+ yield tmp
496
+
497
+ def get_airflow_home_dir(self) -> str:
498
+ """Return the Airflow home directory."""
499
+ return os.environ.get("AIRFLOW_HOME", os.path.expanduser("~/airflow"))
@@ -21,8 +21,7 @@ import subprocess
21
21
  from abc import ABC
22
22
  from typing import Any
23
23
 
24
- from airflow.exceptions import AirflowException
25
- from airflow.providers.common.compat.sdk import BaseHook
24
+ from airflow.providers.common.compat.sdk import AirflowException, BaseHook
26
25
 
27
26
 
28
27
  class TtuHook(BaseHook, ABC):
@@ -39,7 +39,7 @@ from airflow.providers.teradata.triggers.teradata_compute_cluster import Teradat
39
39
  if TYPE_CHECKING:
40
40
  from airflow.providers.common.compat.sdk import Context
41
41
 
42
- from airflow.exceptions import AirflowException
42
+ from airflow.providers.common.compat.sdk import AirflowException
43
43
 
44
44
 
45
45
  # Represents